Compare commits
	
		
			309 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 3db9d58dac | ||
|  | 3fbe9c3cdd | ||
|  | 130e9c519c | ||
|  | 78c9e9745d | ||
|  | 38ebb5abf4 | ||
|  | 9b73be26ab | ||
|  | fd0095b73f | ||
|  | 226049f66a | ||
|  | dc1cf88ca6 | ||
|  | f5f8b730b5 | ||
|  | e8f6b42316 | ||
|  | 49b0d73654 | ||
|  | 394da67cf1 | ||
|  | ef7da36ac6 | ||
|  | 1312100bc7 | ||
|  | 4085bc2152 | ||
|  | f4d7e72426 | ||
|  | ece63ad071 | ||
|  | a9550b8243 | ||
|  | 43724e40b2 | ||
|  | 1bfa40e926 | ||
|  | d493f71c4e | ||
|  | 87f4d1a323 | ||
|  | 0a0e6114f5 | ||
|  | 41d36fa3bf | ||
|  | 707923e3f5 | ||
|  | d9b9581df2 | ||
|  | 463e7c66af | ||
|  | 2be28a22a7 | ||
|  | d73f0bb1af | ||
|  | ce74978b1e | ||
|  | 2b0157aecd | ||
|  | f49baf5d90 | ||
|  | 7cc964c7d8 | ||
|  | bc77322c2f | ||
|  | 8913a74a86 | ||
|  | af35b25d15 | ||
|  | 476b07af6e | ||
|  | e2b9a02531 | ||
|  | 6cc6229066 | ||
|  | 4c62a060f0 | ||
|  | 3d80637fa4 | ||
|  | 68be9fe979 | ||
|  | 547cd4a3ae | ||
|  | ee2d50b2d1 | ||
|  | 15c3ddece8 | ||
|  | beaa9744b7 | ||
|  | 8eb51790b5 | ||
|  | aadc6262ed | ||
|  | 00ae6298d4 | ||
|  | ad0669a326 | ||
|  | 85df76c623 | ||
|  | 87512246cb | ||
|  | a3f9016ae9 | ||
|  | 4e58e9f8d1 | ||
|  | 7c533394fd | ||
|  | 333e014f13 | ||
|  | c0c0efce18 | ||
|  | beabaee345 | ||
|  | c937af3919 | ||
|  | aa4a6ae023 | ||
|  | b57946ec98 | ||
|  | 1e110a2c41 | ||
|  | b234aa48e4 | ||
|  | 8086576677 | ||
|  | 03e34299f0 | ||
|  | 421e3f324f | ||
|  | a0b803959c | ||
|  | ff4d57032a | ||
|  | ba34589065 | ||
|  | a4d11eef46 | ||
|  | fda2e2b47a | ||
|  | d287f480e5 | ||
|  | d85f0e6226 | ||
|  | cfb4943986 | ||
|  | b453a96211 | ||
|  | 81f9b351b3 | ||
|  | 4bca3de42f | ||
|  | 235b1a3679 | ||
|  | 450658d7ac | ||
|  | 8e17e42e26 | ||
|  | 2d6a4c4b90 | ||
|  | 38703acc29 | ||
|  | 095217e797 | ||
|  | 86e965f854 | ||
|  | 57db68dc04 | ||
|  | 72de6d67c7 | ||
|  | b2c3acd025 | ||
|  | 605de59bd0 | ||
|  | e0565ddac5 | ||
|  | 18b68f1b80 | ||
|  | ea88806630 | ||
|  | d738462139 | ||
|  | 9490ad2bf7 | ||
|  | 705c55ce24 | ||
|  | 59fbd505a0 | ||
|  | 1cc20c9770 | ||
|  | f8f267a880 | ||
|  | 80ea1f6883 | ||
|  | 75ee282a3d | ||
|  | 4edad4601c | ||
|  | 152b51fd33 | ||
|  | 66a0fca4ad | ||
|  | e7c7a66cd1 | ||
|  | b3dbb87c3c | ||
|  | 3d45538998 | ||
|  | 8df9d3fef9 | ||
|  | 99e660c66d | ||
|  | aa02f87b69 | ||
|  | f0d1ee2cb4 | ||
|  | ca4967311d | ||
|  | 65eb6ab611 | ||
|  | 1cb2f7814c | ||
|  | b5485b16e6 | ||
|  | 62c8597a3b | ||
|  | 488604ff2e | ||
|  | bd88a17b8e | ||
|  | 8e892dccfe | ||
|  | c22eb34017 | ||
|  | dcf3edb03e | ||
|  | c85b59d3b5 | ||
|  | 1170de1e8e | ||
|  | 332bd767d4 | ||
|  | 0053b30237 | ||
|  | d44533d956 | ||
|  | 12d8bd5a22 | ||
|  | ae326678ec | ||
|  | 8d31f165c0 | ||
|  | cfd4d6a161 | ||
|  | 329f030a41 | ||
|  | 68dc2925fb | ||
|  | 0d4e61d489 | ||
|  | dc7b96a569 | ||
|  | 50882e5bb0 | ||
|  | 280a73af3b | ||
|  | d8c0631dab | ||
|  | 9166ba91d7 | ||
|  | 6bc4e602bb | ||
|  | 45a7520fc3 | ||
|  | 64c0cace85 | ||
|  | 82af5e4a19 | ||
|  | 7e0ba1b335 | ||
|  | 44b7f792fe | ||
|  | a3e432eb68 | ||
|  | 009f9a2b14 | ||
|  | 2ca905b6e5 | ||
|  | 3b099f936a | ||
|  | 4d6ddb070e | ||
|  | b205314424 | ||
|  | e83132f32c | ||
|  | 1b38309d70 | ||
|  | 6e8196d475 | ||
|  | 90fecc56dd | ||
|  | d3d7f0e670 | ||
|  | 37ffeafeff | ||
|  | abc159b7b9 | ||
|  | 648b28876d | ||
|  | 5b9f2bac87 | ||
|  | 17151f67c2 | ||
|  | 5f14d958ac | ||
|  | bd6c52e025 | ||
|  | cb77bb6b69 | ||
|  | 78b240b740 | ||
|  | 7e30f00178 | ||
|  | 35310dbc73 | ||
|  | af82c07acc | ||
|  | 3f75f30f26 | ||
|  | f7f0e10d4d | ||
|  | 091238a2cf | ||
|  | 0458ef869e | ||
|  | 0bf08db7b9 | ||
|  | d3420918cd | ||
|  | 138e759161 | ||
|  | f1d6ce7d12 | ||
|  | ff749a7a0a | ||
|  | bff78ca8dd | ||
|  | 81647d67a0 | ||
|  | d8924ed892 | ||
|  | 799cdafae6 | ||
|  | bc0c55e49a | ||
|  | c61c6a8525 | ||
|  | 3e764d068c | ||
|  | ac25f4b98b | ||
|  | aa6ff8c84a | ||
|  | 37ca79e9c5 | ||
|  | 6040b4b494 | ||
|  | 51ea3e3c6f | ||
|  | 5a16dda50d | ||
|  | bbfa978861 | ||
|  | 54ca7bf09f | ||
|  | 8bf5370b6c | ||
|  | ecefa05e03 | ||
|  | e013494fb2 | ||
|  | 4853f74dbf | ||
|  | 6f45ee6813 | ||
|  | c60ed32f3a | ||
|  | 178851589d | ||
|  | 5bcc679194 | ||
|  | 1e17b5ac66 | ||
|  | 19f12f3f2f | ||
|  | 71e8d9a490 | ||
|  | e3cd553f82 | ||
|  | b61c8cd104 | ||
|  | 8f288fe458 | ||
|  | 02a920feea | ||
|  | be2c4f2b3c | ||
|  | 7ac74b1c1f | ||
|  | 933cb1d5c7 | ||
|  | 6203e30152 | ||
|  | 7d94af0e31 | ||
|  | 564a2b5f1e | ||
|  | 1dbe7a3163 | ||
|  | 47f8a126ca | ||
|  | 693195f70b | ||
|  | 2267b7e7d7 | ||
|  | a06e605e67 | ||
|  | 47c67ecc99 | ||
|  | 4c4b7cbeae | ||
|  | ddececbfea | ||
|  | 71a6f3d1a4 | ||
|  | e86cf962e9 | ||
|  | 99a58d5c91 | ||
|  | eecbb5ca90 | ||
|  | fbb3bf869c | ||
|  | b887ea9623 | ||
|  | c68e3e1238 | ||
|  | c5080e4030 | ||
|  | 0d01365751 | ||
|  | f4a06ad65d | ||
|  | 05a22d5a54 | ||
|  | 2424ece0c5 | ||
|  | 2d02551d0a | ||
|  | ac416aeeb3 | ||
|  | d09af430e8 | ||
|  | 79454b5eed | ||
|  | 921c1fa412 | ||
|  | 1aba145bc6 | ||
|  | 290d9df3eb | ||
|  | aa76ccdd25 | ||
|  | abe8070c36 | ||
|  | 2d28c258fd | ||
|  | 1338839b52 | ||
|  | 058203a0ec | ||
|  | 8fdf664968 | ||
|  | 50555ec73e | ||
|  | 951a532a9f | ||
|  | e940044603 | ||
|  | babfbb0fcd | ||
|  | bbed312bdd | ||
|  | b593764ded | ||
|  | 483c840fc8 | ||
|  | de80f0ccff | ||
|  | d0b87f7f82 | ||
|  | bf32d3c39a | ||
|  | bc14f2cdaa | ||
|  | 06a21e038a | ||
|  | 4d5eba317e | ||
|  | 9170eea784 | ||
|  | 2769967e1e | ||
|  | 609f50d261 | ||
|  | 82f0eb1cbc | ||
|  | b47669403b | ||
|  | 91899acfe5 | ||
|  | ffedd33101 | ||
|  | af292b0ec2 | ||
|  | 1ead7f9b2b | ||
|  | 5c91877b69 | ||
|  | e57d834a0d | ||
|  | 0578cdb62e | ||
|  | b661afba01 | ||
|  | b1002dd4f9 | ||
|  | 8e69008699 | ||
|  | f45552f8f8 | ||
|  | a4fe091a51 | ||
|  | 216217e2c6 | ||
|  | 799775b3a7 | ||
|  | ae0384df29 | ||
|  | 8f57279dc7 | ||
|  | e8dbd12f22 | ||
|  | ca230d28b4 | ||
|  | c96065b187 | ||
|  | 2abcf4764d | ||
|  | 6a4c342e45 | ||
|  | bb0b1e88ef | ||
|  | 63c9135184 | ||
|  | 7fac0ef961 | ||
|  | 5a2e268160 | ||
|  | a4e4e8f440 | ||
|  | b62ce947a6 | ||
|  | 9538662262 | ||
|  | 09d7ae4f80 | ||
|  | d7ded366c7 | ||
|  | 09c77973a0 | ||
|  | 22f3c70234 | ||
|  | 6527b1386f | ||
|  | baabf97acd | ||
|  | 97005aca66 | ||
|  | 6e8ea50c19 | ||
|  | 1fcd706e11 | ||
|  | 008bb19b0b | ||
|  | 023acab779 | ||
|  | 68e8584520 | ||
|  | 5d120ebca0 | ||
|  | f91b89f723 | ||
|  | 1181b75e16 | ||
|  | 5f00b4f923 | ||
|  | 4c31193b82 | ||
|  | 17fc9d1886 | ||
|  | d7285d43dd | 
							
								
								
									
										11
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										11
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -1,8 +1,15 @@ | |||||||
| .* |  | ||||||
| !.gitignore | !.gitignore | ||||||
| *~ | *~ | ||||||
| *.py[co] | *.py[co] | ||||||
| .*.sw[po] | .*.sw[po] | ||||||
|  | .cache/ | ||||||
|  | .coverage | ||||||
|  | .coveragerc | ||||||
|  | .env | ||||||
|  | .idea/ | ||||||
|  | .pytest_cache/ | ||||||
|  | .tox/ | ||||||
|  | .eggs/ | ||||||
| *.egg | *.egg | ||||||
| docs/.build | docs/.build | ||||||
| docs/_build | docs/_build | ||||||
| @@ -13,8 +20,6 @@ env/ | |||||||
| .settings | .settings | ||||||
| .project | .project | ||||||
| .pydevproject | .pydevproject | ||||||
| tests/test_bugfix.py |  | ||||||
| htmlcov/ | htmlcov/ | ||||||
| venv | venv | ||||||
| venv3 | venv3 | ||||||
| scratchpad |  | ||||||
|   | |||||||
| @@ -1,24 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| sudo apt-get remove mongodb-org-server |  | ||||||
| sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10 |  | ||||||
|  |  | ||||||
| if [ "$MONGODB" = "3.4" ]; then |  | ||||||
|     sudo apt-key adv --keyserver keyserver.ubuntu.com:80 --recv 0C49F3730359A14518585931BC711F9BA15703C6 |  | ||||||
|     echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.4.list |  | ||||||
|     sudo apt-get update |  | ||||||
|     sudo apt-get install mongodb-org-server=3.4.17 |  | ||||||
|     # service should be started automatically |  | ||||||
| elif [ "$MONGODB" = "3.6" ]; then |  | ||||||
|     sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2930ADAE8CAF5059EE73BB4B58712A2291FA4AD5 |  | ||||||
|     echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.6 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.6.list |  | ||||||
|     sudo apt-get update |  | ||||||
|     sudo apt-get install mongodb-org-server=3.6.12 |  | ||||||
|     # service should be started automatically |  | ||||||
| else |  | ||||||
|     echo "Invalid MongoDB version, expected 2.6, 3.0, 3.2, 3.4 or 3.6." |  | ||||||
|     exit 1 |  | ||||||
| fi; |  | ||||||
|  |  | ||||||
| mkdir db |  | ||||||
| 1>db/logs mongod --dbpath=db & |  | ||||||
| @@ -5,17 +5,12 @@ pylint: | |||||||
|  |  | ||||||
|     options: |     options: | ||||||
|         additional-builtins: |         additional-builtins: | ||||||
|             # add xrange and long as valid built-ins. In Python 3, xrange is |             # add long as valid built-ins. | ||||||
|             # translated into range and long is translated into int via 2to3 (see |  | ||||||
|             # "use_2to3" in setup.py). This should be removed when we drop Python |  | ||||||
|             # 2 support (which probably won't happen any time soon). |  | ||||||
|             - xrange |  | ||||||
|             - long |             - long | ||||||
|  |  | ||||||
| pyflakes: | pyflakes: | ||||||
|     disable: |     disable: | ||||||
|         # undefined variables are already covered by pylint (and exclude |         # undefined variables are already covered by pylint (and exclude long) | ||||||
|         # xrange & long) |  | ||||||
|         - F821 |         - F821 | ||||||
|  |  | ||||||
| ignore-paths: | ignore-paths: | ||||||
|   | |||||||
							
								
								
									
										12
									
								
								.pre-commit-config.yaml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								.pre-commit-config.yaml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,12 @@ | |||||||
|  | fail_fast: false | ||||||
|  | repos: | ||||||
|  |     - repo: https://github.com/ambv/black | ||||||
|  |       rev: 19.10b0 | ||||||
|  |       hooks: | ||||||
|  |         - id: black | ||||||
|  |     - repo: https://gitlab.com/pycqa/flake8 | ||||||
|  |       rev: 3.8.0a2 | ||||||
|  |       hooks: | ||||||
|  |         - id: flake8 | ||||||
|  |           additional_dependencies: | ||||||
|  |             - flake8-import-order | ||||||
							
								
								
									
										92
									
								
								.travis.yml
									
									
									
									
									
								
							
							
						
						
									
										92
									
								
								.travis.yml
									
									
									
									
									
								
							| @@ -1,13 +1,10 @@ | |||||||
| # For full coverage, we'd have to test all supported Python, MongoDB, and | # For full coverage, we'd have to test all supported Python, MongoDB, and | ||||||
| # PyMongo combinations. However, that would result in an overly long build | # PyMongo combinations. However, that would result in an overly long build | ||||||
| # with a very large number of jobs, hence we only test a subset of all the | # with a very large number of jobs, hence we only test a subset of all the | ||||||
| # combinations: | # combinations. | ||||||
| # * MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, | # * Python3.7, MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, | ||||||
| #   tested against Python v2.7, v3.5, v3.6, and PyPy. | # Other combinations are tested. See below for the details or check the travis jobs | ||||||
| # * Besides that, we test the lowest actively supported Python/MongoDB/PyMongo |  | ||||||
| #   combination: MongoDB v3.4, PyMongo v3.4, Python v2.7. |  | ||||||
| # * MongoDB v3.6 is tested against Python v3.6, and PyMongo v3.6, v3.7, v3.8. |  | ||||||
| # |  | ||||||
| # We should periodically check MongoDB Server versions supported by MongoDB | # We should periodically check MongoDB Server versions supported by MongoDB | ||||||
| # Inc., add newly released versions to the test matrix, and remove versions | # Inc., add newly released versions to the test matrix, and remove versions | ||||||
| # which have reached their End of Life. See: | # which have reached their End of Life. See: | ||||||
| @@ -17,57 +14,68 @@ | |||||||
| # Reminder: Update README.rst if you change MongoDB versions we test. | # Reminder: Update README.rst if you change MongoDB versions we test. | ||||||
|  |  | ||||||
| language: python | language: python | ||||||
|  | dist: xenial | ||||||
| python: | python: | ||||||
| - 2.7 |  | ||||||
| - 3.5 | - 3.5 | ||||||
| - 3.6 | - 3.6 | ||||||
| - pypy | - 3.7 | ||||||
|  | - 3.8 | ||||||
|  | - pypy3 | ||||||
|  |  | ||||||
| env: | env: | ||||||
| - MONGODB=3.4 PYMONGO=3.x |   global: | ||||||
|  |     - MONGODB_3_4=3.4.17 | ||||||
|  |     - MONGODB_3_6=3.6.12 | ||||||
|  |     - MONGODB_4_0=4.0.13 | ||||||
|  |  | ||||||
|  |     - PYMONGO_3_4=3.4 | ||||||
|  |     - PYMONGO_3_6=3.6 | ||||||
|  |     - PYMONGO_3_9=3.9 | ||||||
|  |     - PYMONGO_3_10=3.10 | ||||||
|  |  | ||||||
|  |     - MAIN_PYTHON_VERSION=3.7 | ||||||
|  |   matrix: | ||||||
|  |     - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_10} | ||||||
|  |  | ||||||
| matrix: | matrix: | ||||||
|   # Finish the build as soon as one job fails |   # Finish the build as soon as one job fails | ||||||
|   fast_finish: true |   fast_finish: true | ||||||
|  |  | ||||||
|   include: |   include: | ||||||
|   - python: 2.7 |   - python: 3.7 | ||||||
|     env: MONGODB=3.4 PYMONGO=3.4.x |     env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6} | ||||||
|   - python: 3.6 |   - python: 3.7 | ||||||
|     env: MONGODB=3.6 PYMONGO=3.x |     env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9} | ||||||
|  |   - python: 3.7 | ||||||
| before_install: |     env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_10} | ||||||
| - bash .install_mongodb_on_travis.sh |   - python: 3.8 | ||||||
| - sleep 20  # https://docs.travis-ci.com/user/database-setup/#mongodb-does-not-immediately-accept-connections |     env: MONGODB=${MONGODB_4_0} PYMONGO=${PYMONGO_3_10} | ||||||
| - mongo --eval 'db.version();' |  | ||||||
|  |  | ||||||
| install: | install: | ||||||
| - sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev |   # Install Mongo | ||||||
|   libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev |   - wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz | ||||||
|   python-tk |   - tar xzf mongodb-linux-x86_64-${MONGODB}.tgz | ||||||
| - travis_retry pip install --upgrade pip |   - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version | ||||||
| - travis_retry pip install coveralls |   # Install Python dependencies. | ||||||
| - travis_retry pip install flake8 flake8-import-order |   - pip install --upgrade pip | ||||||
| - travis_retry pip install "tox"   # tox 3.11.0 has requirement virtualenv>=14.0.0 |   - pip install coveralls | ||||||
| - travis_retry pip install "virtualenv"  # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) |   - pip install pre-commit | ||||||
| - travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test |   - pip install tox | ||||||
|  |   # tox dryrun to setup the tox venv (we run a mock test). | ||||||
|  |   - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder" | ||||||
|  |  | ||||||
| # Cache dependencies installed via pip |  | ||||||
| cache: pip |  | ||||||
|  |  | ||||||
| # Run flake8 for py27 |  | ||||||
| before_script: | before_script: | ||||||
| - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi |   - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data | ||||||
|  |   - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork | ||||||
|  |   # Run pre-commit hooks (black, flake8, etc) on entire codebase | ||||||
|  |   - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then pre-commit run -a; else echo "pre-commit checks only runs on py37"; fi | ||||||
|  |   - mongo --eval 'db.version();'    # Make sure mongo is awake | ||||||
|  |  | ||||||
| script: | script: | ||||||
| - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage |   - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine" | ||||||
|  |  | ||||||
| # For now only submit coveralls for Python v2.7. Python v3.x currently shows |  | ||||||
| # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible |  | ||||||
| # code in a separate dir and runs tests on that. |  | ||||||
| after_success: | after_success: | ||||||
| - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi | - - if [[ $TRAVIS_PYTHON_VERSION == $MAIN_PYTHON_VERSION ]]; then coveralls --verbose; else echo "coveralls only sent for py37"; fi | ||||||
|  |  | ||||||
| notifications: | notifications: | ||||||
|   irc: irc.freenode.org#mongoengine |   irc: irc.freenode.org#mongoengine | ||||||
| @@ -89,11 +97,11 @@ deploy: | |||||||
|   distributions: "sdist bdist_wheel" |   distributions: "sdist bdist_wheel" | ||||||
|  |  | ||||||
|   # Only deploy on tagged commits (aka GitHub releases) and only for the parent |   # Only deploy on tagged commits (aka GitHub releases) and only for the parent | ||||||
|   # repo's builds running Python v2.7 along with PyMongo v3.x and MongoDB v3.4. |   # repo's builds running Python v3.7 along with PyMongo v3.x and MongoDB v3.4. | ||||||
|   # We run Travis against many different Python, PyMongo, and MongoDB versions |   # We run Travis against many different Python, PyMongo, and MongoDB versions | ||||||
|   # and we don't want the deploy to occur multiple times). |   # and we don't want the deploy to occur multiple times). | ||||||
|   on: |   on: | ||||||
|     tags: true |     tags: true | ||||||
|     repo: MongoEngine/mongoengine |     repo: MongoEngine/mongoengine | ||||||
|     condition: ($PYMONGO = 3.x) && ($MONGODB = 3.4) |     condition: ($PYMONGO = ${PYMONGO_3_10}) && ($MONGODB = ${MONGODB_3_4}) | ||||||
|     python: 2.7 |     python: 3.7 | ||||||
|   | |||||||
							
								
								
									
										5
									
								
								AUTHORS
									
									
									
									
									
								
							
							
						
						
									
										5
									
								
								AUTHORS
									
									
									
									
									
								
							| @@ -252,3 +252,8 @@ that much better: | |||||||
|  * Paulo Amaral (https://github.com/pauloAmaral) |  * Paulo Amaral (https://github.com/pauloAmaral) | ||||||
|  * Gaurav Dadhania (https://github.com/GVRV) |  * Gaurav Dadhania (https://github.com/GVRV) | ||||||
|  * Yurii Andrieiev (https://github.com/yandrieiev) |  * Yurii Andrieiev (https://github.com/yandrieiev) | ||||||
|  |  * Filip Kucharczyk (https://github.com/Pacu2) | ||||||
|  |  * Eric Timmons (https://github.com/daewok) | ||||||
|  |  * Matthew Simpson (https://github.com/mcsimps2) | ||||||
|  |  * Leonardo Domingues (https://github.com/leodmgs) | ||||||
|  |  * Agustin Barto (https://github.com/abarto) | ||||||
|   | |||||||
| @@ -20,23 +20,43 @@ post to the `user group <http://groups.google.com/group/mongoengine-users>` | |||||||
| Supported Interpreters | Supported Interpreters | ||||||
| ---------------------- | ---------------------- | ||||||
|  |  | ||||||
| MongoEngine supports CPython 2.7 and newer. Language | MongoEngine supports CPython 3.5 and newer as well as Pypy3. | ||||||
| features not supported by all interpreters can not be used. | Language features not supported by all interpreters can not be used. | ||||||
| The codebase is written in python 2 so you must be using python 2 |  | ||||||
| when developing new features. Compatibility of the library with Python 3 | Python3 codebase | ||||||
| relies on the 2to3 package that gets executed as part of the installation | ---------------------- | ||||||
| build. You should ensure that your code is properly converted by |  | ||||||
| `2to3 <http://docs.python.org/library/2to3.html>`_. | Since 0.20, the codebase is exclusively Python 3. | ||||||
|  |  | ||||||
|  | Earlier versions were exclusively Python2, and were relying on 2to3 to support Python3 installs. | ||||||
|  | Travis runs the tests against the main Python 3.x versions. | ||||||
|  |  | ||||||
|  |  | ||||||
| Style Guide | Style Guide | ||||||
| ----------- | ----------- | ||||||
|  |  | ||||||
| MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_ | MongoEngine's codebase is formatted with `black <https://github.com/python/black>`_, other tools like | ||||||
| including 4 space indents. When possible we try to stick to 79 character line | flake8 are also used. Those tools will run as part of the CI and will fail in case the code is not formatted properly. | ||||||
| limits. However, screens got bigger and an ORM has a strong focus on |  | ||||||
| readability and if it can help, we accept 119 as maximum line length, in a | To install all development tools, simply run the following commands: | ||||||
| similar way as `django does |  | ||||||
| <https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_ | .. code-block:: console | ||||||
|  |  | ||||||
|  |     $ python -m pip install -r requirements-dev.txt | ||||||
|  |  | ||||||
|  |  | ||||||
|  | You can install `pre-commit <https://pre-commit.com/>`_ into your git hooks, | ||||||
|  | to automatically check and fix any formatting issue before creating a | ||||||
|  | git commit. | ||||||
|  |  | ||||||
|  | To enable ``pre-commit`` simply run: | ||||||
|  |  | ||||||
|  | .. code-block:: console | ||||||
|  |  | ||||||
|  |     $ pre-commit install | ||||||
|  |  | ||||||
|  | See the ``.pre-commit-config.yaml`` configuration file for more information | ||||||
|  | on how it works. | ||||||
|  |  | ||||||
| Testing | Testing | ||||||
| ------- | ------- | ||||||
| @@ -58,7 +78,7 @@ General Guidelines | |||||||
|   should adapt to the breaking change in docs/upgrade.rst. |   should adapt to the breaking change in docs/upgrade.rst. | ||||||
| - Write inline documentation for new classes and methods. | - Write inline documentation for new classes and methods. | ||||||
| - Write tests and make sure they pass (make sure you have a mongod | - Write tests and make sure they pass (make sure you have a mongod | ||||||
|   running on the default port, then execute ``python setup.py nosetests`` |   running on the default port, then execute ``python setup.py test`` | ||||||
|   from the cmd line to run the test suite). |   from the cmd line to run the test suite). | ||||||
| - Ensure tests pass on all supported Python, PyMongo, and MongoDB versions. | - Ensure tests pass on all supported Python, PyMongo, and MongoDB versions. | ||||||
|   You can test various Python and PyMongo versions locally by executing |   You can test various Python and PyMongo versions locally by executing | ||||||
|   | |||||||
							
								
								
									
										37
									
								
								README.rst
									
									
									
									
									
								
							
							
						
						
									
										37
									
								
								README.rst
									
									
									
									
									
								
							| @@ -26,15 +26,15 @@ an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_. | |||||||
|  |  | ||||||
| Supported MongoDB Versions | Supported MongoDB Versions | ||||||
| ========================== | ========================== | ||||||
| MongoEngine is currently tested against MongoDB v3.4 and v3.6. Future versions | MongoEngine is currently tested against MongoDB v3.4, v3.6 and v4.0. Future versions | ||||||
| should be supported as well, but aren't actively tested at the moment. Make | should be supported as well, but aren't actively tested at the moment. Make | ||||||
| sure to open an issue or submit a pull request if you experience any problems | sure to open an issue or submit a pull request if you experience any problems | ||||||
| with MongoDB version > 3.6. | with MongoDB version > 4.0. | ||||||
|  |  | ||||||
| Installation | Installation | ||||||
| ============ | ============ | ||||||
| We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of | We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of | ||||||
| `pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``. | `pip <https://pip.pypa.io/>`_. You can then use ``python -m pip install -U mongoengine``. | ||||||
| You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | ||||||
| and thus you can use ``easy_install -U mongoengine``. Another option is | and thus you can use ``easy_install -U mongoengine``. Another option is | ||||||
| `pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine`` | `pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine`` | ||||||
| @@ -42,13 +42,14 @@ to both create the virtual environment and install the package. Otherwise, you c | |||||||
| download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and | download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and | ||||||
| run ``python setup.py install``. | run ``python setup.py install``. | ||||||
|  |  | ||||||
|  | The support for Python2 was dropped with MongoEngine 0.20.0 | ||||||
|  |  | ||||||
| Dependencies | Dependencies | ||||||
| ============ | ============ | ||||||
| All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_. | All of the dependencies can easily be installed via `python -m pip <https://pip.pypa.io/>`_. | ||||||
| At the very least, you'll need these two packages to use MongoEngine: | At the very least, you'll need these two packages to use MongoEngine: | ||||||
|  |  | ||||||
| - pymongo>=3.5 | - pymongo>=3.4 | ||||||
| - six>=1.10.0 |  | ||||||
|  |  | ||||||
| If you utilize a ``DateTimeField``, you might also use a more flexible date parser: | If you utilize a ``DateTimeField``, you might also use a more flexible date parser: | ||||||
|  |  | ||||||
| @@ -58,6 +59,10 @@ If you need to use an ``ImageField`` or ``ImageGridFsProxy``: | |||||||
|  |  | ||||||
| - Pillow>=2.0.0 | - Pillow>=2.0.0 | ||||||
|  |  | ||||||
|  | If you need to use signals: | ||||||
|  |  | ||||||
|  | - blinker>=1.3 | ||||||
|  |  | ||||||
| Examples | Examples | ||||||
| ======== | ======== | ||||||
| Some simple examples of what MongoEngine code looks like: | Some simple examples of what MongoEngine code looks like: | ||||||
| @@ -91,12 +96,11 @@ Some simple examples of what MongoEngine code looks like: | |||||||
|  |  | ||||||
|     # Iterate over all posts using the BlogPost superclass |     # Iterate over all posts using the BlogPost superclass | ||||||
|     >>> for post in BlogPost.objects: |     >>> for post in BlogPost.objects: | ||||||
|     ...     print '===', post.title, '===' |     ...     print('===', post.title, '===') | ||||||
|     ...     if isinstance(post, TextPost): |     ...     if isinstance(post, TextPost): | ||||||
|     ...         print post.content |     ...         print(post.content) | ||||||
|     ...     elif isinstance(post, LinkPost): |     ...     elif isinstance(post, LinkPost): | ||||||
|     ...         print 'Link:', post.url |     ...         print('Link:', post.url) | ||||||
|     ...     print |  | ||||||
|     ... |     ... | ||||||
|  |  | ||||||
|     # Count all blog posts and its subtypes |     # Count all blog posts and its subtypes | ||||||
| @@ -116,7 +120,8 @@ Some simple examples of what MongoEngine code looks like: | |||||||
| Tests | Tests | ||||||
| ===== | ===== | ||||||
| To run the test suite, ensure you are running a local instance of MongoDB on | To run the test suite, ensure you are running a local instance of MongoDB on | ||||||
| the standard port and have ``nose`` installed. Then, run ``python setup.py nosetests``. | the standard port and have ``pytest`` installed. Then, run ``python setup.py test`` | ||||||
|  | or simply ``pytest``. | ||||||
|  |  | ||||||
| To run the test suite on every supported Python and PyMongo version, you can | To run the test suite on every supported Python and PyMongo version, you can | ||||||
| use ``tox``. You'll need to make sure you have each supported Python version | use ``tox``. You'll need to make sure you have each supported Python version | ||||||
| @@ -125,20 +130,18 @@ installed in your environment and then: | |||||||
| .. code-block:: shell | .. code-block:: shell | ||||||
|  |  | ||||||
|     # Install tox |     # Install tox | ||||||
|     $ pip install tox |     $ python -m pip install tox | ||||||
|     # Run the test suites |     # Run the test suites | ||||||
|     $ tox |     $ tox | ||||||
|  |  | ||||||
| If you wish to run a subset of tests, use the nosetests convention: | If you wish to run a subset of tests, use the pytest convention: | ||||||
|  |  | ||||||
| .. code-block:: shell | .. code-block:: shell | ||||||
|  |  | ||||||
|     # Run all the tests in a particular test file |     # Run all the tests in a particular test file | ||||||
|     $ python setup.py nosetests --tests tests/fields/fields.py |     $ pytest tests/fields/test_fields.py | ||||||
|     # Run only particular test class in that file |     # Run only particular test class in that file | ||||||
|     $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest |     $ pytest tests/fields/test_fields.py::TestField | ||||||
|     # Use the -s option if you want to print some debug statements or use pdb |  | ||||||
|     $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest -s |  | ||||||
|  |  | ||||||
| Community | Community | ||||||
| ========= | ========= | ||||||
|   | |||||||
							
								
								
									
										207
									
								
								benchmark.py
									
									
									
									
									
								
							
							
						
						
									
										207
									
								
								benchmark.py
									
									
									
									
									
								
							| @@ -1,207 +0,0 @@ | |||||||
| #!/usr/bin/env python |  | ||||||
|  |  | ||||||
| """ |  | ||||||
| Simple benchmark comparing PyMongo and MongoEngine. |  | ||||||
|  |  | ||||||
| Sample run on a mid 2015 MacBook Pro (commit b282511): |  | ||||||
|  |  | ||||||
| Benchmarking... |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries - Pymongo |  | ||||||
| 2.58979988098 |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries - Pymongo write_concern={"w": 0} |  | ||||||
| 1.26657605171 |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries - MongoEngine |  | ||||||
| 8.4351580143 |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries without continual assign - MongoEngine |  | ||||||
| 7.20191693306 |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True |  | ||||||
| 6.31104588509 |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True |  | ||||||
| 6.07083487511 |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False |  | ||||||
| 5.97704291344 |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False |  | ||||||
| 5.9111430645 |  | ||||||
| """ |  | ||||||
|  |  | ||||||
| import timeit |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def main(): |  | ||||||
|     print("Benchmarking...") |  | ||||||
|  |  | ||||||
|     setup = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| connection = MongoClient() |  | ||||||
| connection.drop_database('timeit_test') |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| connection = MongoClient() |  | ||||||
|  |  | ||||||
| db = connection.timeit_test |  | ||||||
| noddy = db.noddy |  | ||||||
|  |  | ||||||
| for i in range(10000): |  | ||||||
|     example = {'fields': {}} |  | ||||||
|     for j in range(20): |  | ||||||
|         example['fields']['key' + str(j)] = 'value ' + str(j) |  | ||||||
|  |  | ||||||
|     noddy.save(example) |  | ||||||
|  |  | ||||||
| myNoddys = noddy.find() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - Pymongo""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| from pymongo.write_concern import WriteConcern |  | ||||||
| connection = MongoClient() |  | ||||||
|  |  | ||||||
| db = connection.get_database('timeit_test', write_concern=WriteConcern(w=0)) |  | ||||||
| noddy = db.noddy |  | ||||||
|  |  | ||||||
| for i in range(10000): |  | ||||||
|     example = {'fields': {}} |  | ||||||
|     for j in range(20): |  | ||||||
|         example['fields']["key"+str(j)] = "value "+str(j) |  | ||||||
|  |  | ||||||
|     noddy.save(example) |  | ||||||
|  |  | ||||||
| myNoddys = noddy.find() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     setup = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| connection = MongoClient() |  | ||||||
| connection.drop_database('timeit_test') |  | ||||||
| connection.close() |  | ||||||
|  |  | ||||||
| from mongoengine import Document, DictField, connect |  | ||||||
| connect('timeit_test') |  | ||||||
|  |  | ||||||
| class Noddy(Document): |  | ||||||
|     fields = DictField() |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save() |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     fields = {} |  | ||||||
|     for j in range(20): |  | ||||||
|         fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.fields = fields |  | ||||||
|     noddy.save() |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries without continual assign - MongoEngine""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(write_concern={"w": 0}, cascade=True) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(write_concern={"w": 0}, validate=False, cascade=True) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(validate=False, write_concern={"w": 0}) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(force_insert=True, write_concern={"w": 0}, validate=False) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == "__main__": |  | ||||||
|     main() |  | ||||||
							
								
								
									
										142
									
								
								benchmarks/test_basic_doc_ops.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										142
									
								
								benchmarks/test_basic_doc_ops.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,142 @@ | |||||||
|  | from timeit import repeat | ||||||
|  |  | ||||||
|  | import mongoengine | ||||||
|  | from mongoengine import ( | ||||||
|  |     BooleanField, | ||||||
|  |     Document, | ||||||
|  |     EmailField, | ||||||
|  |     EmbeddedDocument, | ||||||
|  |     EmbeddedDocumentField, | ||||||
|  |     IntField, | ||||||
|  |     ListField, | ||||||
|  |     StringField, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | mongoengine.connect(db="mongoengine_benchmark_test") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def timeit(f, n=10000): | ||||||
|  |     return min(repeat(f, repeat=3, number=n)) / float(n) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_basic(): | ||||||
|  |     class Book(Document): | ||||||
|  |         name = StringField() | ||||||
|  |         pages = IntField() | ||||||
|  |         tags = ListField(StringField()) | ||||||
|  |         is_published = BooleanField() | ||||||
|  |         author_email = EmailField() | ||||||
|  |  | ||||||
|  |     Book.drop_collection() | ||||||
|  |  | ||||||
|  |     def init_book(): | ||||||
|  |         return Book( | ||||||
|  |             name="Always be closing", | ||||||
|  |             pages=100, | ||||||
|  |             tags=["self-help", "sales"], | ||||||
|  |             is_published=True, | ||||||
|  |             author_email="alec@example.com", | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     print("Doc initialization: %.3fus" % (timeit(init_book, 1000) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     b = init_book() | ||||||
|  |     print("Doc getattr: %.3fus" % (timeit(lambda: b.name, 10000) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Doc setattr: %.3fus" | ||||||
|  |         % (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     print("Doc validation: %.3fus" % (timeit(b.validate, 1000) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     def save_book(): | ||||||
|  |         b._mark_as_changed("name") | ||||||
|  |         b._mark_as_changed("tags") | ||||||
|  |         b.save() | ||||||
|  |  | ||||||
|  |     print("Save to database: %.3fus" % (timeit(save_book, 100) * 10 ** 6)) | ||||||
|  |  | ||||||
|  |     son = b.to_mongo() | ||||||
|  |     print( | ||||||
|  |         "Load from SON: %.3fus" % (timeit(lambda: Book._from_son(son), 1000) * 10 ** 6) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Load from database: %.3fus" % (timeit(lambda: Book.objects[0], 100) * 10 ** 6) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     def create_and_delete_book(): | ||||||
|  |         b = init_book() | ||||||
|  |         b.save() | ||||||
|  |         b.delete() | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Init + save to database + delete: %.3fms" | ||||||
|  |         % (timeit(create_and_delete_book, 10) * 10 ** 3) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def test_big_doc(): | ||||||
|  |     class Contact(EmbeddedDocument): | ||||||
|  |         name = StringField() | ||||||
|  |         title = StringField() | ||||||
|  |         address = StringField() | ||||||
|  |  | ||||||
|  |     class Company(Document): | ||||||
|  |         name = StringField() | ||||||
|  |         contacts = ListField(EmbeddedDocumentField(Contact)) | ||||||
|  |  | ||||||
|  |     Company.drop_collection() | ||||||
|  |  | ||||||
|  |     def init_company(): | ||||||
|  |         return Company( | ||||||
|  |             name="MongoDB, Inc.", | ||||||
|  |             contacts=[ | ||||||
|  |                 Contact(name="Contact %d" % x, title="CEO", address="Address %d" % x) | ||||||
|  |                 for x in range(1000) | ||||||
|  |             ], | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     company = init_company() | ||||||
|  |     print("Big doc to mongo: %.3fms" % (timeit(company.to_mongo, 100) * 10 ** 3)) | ||||||
|  |  | ||||||
|  |     print("Big doc validation: %.3fms" % (timeit(company.validate, 1000) * 10 ** 3)) | ||||||
|  |  | ||||||
|  |     company.save() | ||||||
|  |  | ||||||
|  |     def save_company(): | ||||||
|  |         company._mark_as_changed("name") | ||||||
|  |         company._mark_as_changed("contacts") | ||||||
|  |         company.save() | ||||||
|  |  | ||||||
|  |     print("Save to database: %.3fms" % (timeit(save_company, 100) * 10 ** 3)) | ||||||
|  |  | ||||||
|  |     son = company.to_mongo() | ||||||
|  |     print( | ||||||
|  |         "Load from SON: %.3fms" | ||||||
|  |         % (timeit(lambda: Company._from_son(son), 100) * 10 ** 3) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Load from database: %.3fms" | ||||||
|  |         % (timeit(lambda: Company.objects[0], 100) * 10 ** 3) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |     def create_and_delete_company(): | ||||||
|  |         c = init_company() | ||||||
|  |         c.save() | ||||||
|  |         c.delete() | ||||||
|  |  | ||||||
|  |     print( | ||||||
|  |         "Init + save to database + delete: %.3fms" | ||||||
|  |         % (timeit(create_and_delete_company, 10) * 10 ** 3) | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | if __name__ == "__main__": | ||||||
|  |     test_basic() | ||||||
|  |     print("-" * 100) | ||||||
|  |     test_big_doc() | ||||||
							
								
								
									
										161
									
								
								benchmarks/test_inserts.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										161
									
								
								benchmarks/test_inserts.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,161 @@ | |||||||
|  | import timeit | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def main(): | ||||||
|  |     setup = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  |  | ||||||
|  | connection = MongoClient() | ||||||
|  | connection.drop_database('mongoengine_benchmark_test') | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  |  | ||||||
|  | connection = MongoClient() | ||||||
|  |  | ||||||
|  | db = connection.mongoengine_benchmark_test | ||||||
|  | noddy = db.noddy | ||||||
|  |  | ||||||
|  | for i in range(10000): | ||||||
|  |     example = {'fields': {}} | ||||||
|  |     for j in range(20): | ||||||
|  |         example['fields']["key"+str(j)] = "value "+str(j) | ||||||
|  |  | ||||||
|  |     noddy.insert_one(example) | ||||||
|  |  | ||||||
|  | myNoddys = noddy.find() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("PyMongo: Creating 10000 dictionaries.") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print("{}s".format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | from pymongo import MongoClient, WriteConcern | ||||||
|  | connection = MongoClient() | ||||||
|  |  | ||||||
|  | db = connection.mongoengine_benchmark_test | ||||||
|  | noddy = db.noddy.with_options(write_concern=WriteConcern(w=0)) | ||||||
|  |  | ||||||
|  | for i in range(10000): | ||||||
|  |     example = {'fields': {}} | ||||||
|  |     for j in range(20): | ||||||
|  |         example['fields']["key"+str(j)] = "value "+str(j) | ||||||
|  |  | ||||||
|  |     noddy.insert_one(example) | ||||||
|  |  | ||||||
|  | myNoddys = noddy.find() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).') | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print("{}s".format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     setup = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  |  | ||||||
|  | connection = MongoClient() | ||||||
|  | connection.drop_database('mongoengine_benchmark_test') | ||||||
|  | connection.close() | ||||||
|  |  | ||||||
|  | from mongoengine import Document, DictField, connect | ||||||
|  | connect("mongoengine_benchmark_test") | ||||||
|  |  | ||||||
|  | class Noddy(Document): | ||||||
|  |     fields = DictField() | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save() | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("MongoEngine: Creating 10000 dictionaries.") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print("{}s".format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     fields = {} | ||||||
|  |     for j in range(20): | ||||||
|  |         fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.fields = fields | ||||||
|  |     noddy.save() | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print("MongoEngine: Creating 10000 dictionaries (using a single field assignment).") | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print("{}s".format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(write_concern={"w": 0}) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).') | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print("{}s".format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(write_concern={"w": 0}, validate=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print( | ||||||
|  |         'MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).' | ||||||
|  |     ) | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print("{}s".format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(force_insert=True, write_concern={"w": 0}, validate=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print("-" * 100) | ||||||
|  |     print( | ||||||
|  |         'MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).' | ||||||
|  |     ) | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print("{}s".format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | if __name__ == "__main__": | ||||||
|  |     main() | ||||||
| @@ -1,3 +1,4 @@ | |||||||
|  |  | ||||||
| ========= | ========= | ||||||
| Changelog | Changelog | ||||||
| ========= | ========= | ||||||
| @@ -6,201 +7,256 @@ Development | |||||||
| =========== | =========== | ||||||
| - (Fill this out as you fix issues and develop your features). | - (Fill this out as you fix issues and develop your features). | ||||||
|  |  | ||||||
|  | Changes in 0.20.0 | ||||||
|  | ================= | ||||||
|  | - ATTENTION: Drop support for Python2 | ||||||
|  | - Add Mongo 4.0 to Travis | ||||||
|  | - Fix error when setting a string as a ComplexDateTimeField #2253 | ||||||
|  | - Bump development Status classifier to Production/Stable #2232 | ||||||
|  | - Improve Queryset.get to avoid confusing MultipleObjectsReturned message in case multiple match are found #630 | ||||||
|  | - Fixed a bug causing inaccurate query results, while combining ``__raw__`` and regular filters for the same field #2264 | ||||||
|  | - Add support for the `elemMatch` projection operator in .fields() (e.g BlogPost.objects.fields(elemMatch__comments="test")) #2267 | ||||||
|  | - DictField validate failed without default connection (bug introduced in 0.19.0) #2239 | ||||||
|  | - Remove methods that were deprecated years ago: | ||||||
|  |     - name parameter in Field constructor e.g `StringField(name="...")`, was replaced by db_field | ||||||
|  |     - Queryset.slave_okay() was deprecated since pymongo3 | ||||||
|  |     - dropDups was dropped with MongoDB3 | ||||||
|  |     - ``Queryset._ensure_indexes`` and ``Queryset.ensure_indexes``, the right method to use is ``Document.ensure_indexes`` | ||||||
|  | - Added pre-commit for development/CI #2212 | ||||||
|  | - Renamed requirements-lint.txt to requirements-dev.txt #2212 | ||||||
|  | - Support for setting ReadConcern #2255 | ||||||
|  |  | ||||||
|  | Changes in 0.19.1 | ||||||
|  | ================= | ||||||
|  | - Requires Pillow < 7.0.0 as it dropped Python2 support | ||||||
|  | - DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of | ||||||
|  |     pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079 | ||||||
|  |  | ||||||
|  | Changes in 0.19.0 | ||||||
|  | ================= | ||||||
|  | - BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. #2112 | ||||||
|  |     - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``. | ||||||
|  |     - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``. | ||||||
|  |     - This change also renames the private ``QuerySet._initial_query`` attribute to ``_cls_query``. | ||||||
|  | - BREAKING CHANGE: Removed the deprecated ``format`` param from ``QuerySet.explain``. #2113 | ||||||
|  | - BREAKING CHANGE: Renamed ``MongoEngineConnectionError`` to ``ConnectionFailure``. #2111 | ||||||
|  |     - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it. | ||||||
|  | - BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103 | ||||||
|  |     - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required. | ||||||
|  | - BREAKING CHANGE: A ``LazyReferenceField`` is now stored in the ``_data`` field of its parent as a ``DBRef``, ``Document``, or ``EmbeddedDocument`` (``ObjectId`` is no longer allowed). #2182 | ||||||
|  | - DEPRECATION: ``Q.empty`` & ``QNode.empty`` are marked as deprecated and will be removed in a next version of MongoEngine. #2210 | ||||||
|  |     - Added ability to check if Q or QNode are empty by parsing them to bool. | ||||||
|  |     - Instead of ``Q(name="John").empty`` use ``not Q(name="John")``. | ||||||
|  | - Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125 | ||||||
|  | - Only set no_cursor_timeout when requested (fixes an incompatibility with MongoDB 4.2) #2148 | ||||||
|  | - ``ListField`` now accepts an optional ``max_length`` parameter. #2110 | ||||||
|  | - Improve error message related to InvalidDocumentError #2180 | ||||||
|  | - Added BulkWriteError to replace NotUniqueError which was misleading in bulk write insert #2152 | ||||||
|  | - Added ability to compare Q and Q operations #2204 | ||||||
|  | - Added ability to use a db alias on query_counter #2194 | ||||||
|  | - Added ability to specify collations for querysets with ``Doc.objects.collation`` #2024 | ||||||
|  | - Fix updates of a list field by negative index #2094 | ||||||
|  | - Switch from nosetest to pytest as test runner #2114 | ||||||
|  | - The codebase is now formatted using ``black``. #2109 | ||||||
|  | - Documentation improvements: | ||||||
|  |     - Documented how `pymongo.monitoring` can be used to log all queries issued by MongoEngine to the driver. | ||||||
|  |  | ||||||
|  | Changes in 0.18.2 | ||||||
|  | ================= | ||||||
|  | - Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the ``SequenceField``. #2097 | ||||||
|  | - Various code clarity and documentation improvements. | ||||||
|  |  | ||||||
|  | Changes in 0.18.1 | ||||||
|  | ================= | ||||||
|  | - Fix a bug introduced in 0.18.0 which was causing ``Document.save`` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082 | ||||||
|  | - Add Python 3.7 to Travis CI. #2058 | ||||||
|  |  | ||||||
| Changes in 0.18.0 | Changes in 0.18.0 | ||||||
| ================= | ================= | ||||||
| - Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2. | - Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2. | ||||||
| - MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6 (#2017 #2066). | - MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6. #2017 #2066 | ||||||
| - Improve performance by avoiding a call to `to_mongo` in `Document.save()` #2049 | - Improve performance by avoiding a call to ``to_mongo`` in ``Document.save``. #2049 | ||||||
| - Connection/disconnection improvements: | - Connection/disconnection improvements: | ||||||
|     - Expose `mongoengine.connection.disconnect` and `mongoengine.connection.disconnect_all` |     - Expose ``mongoengine.connection.disconnect`` and ``mongoengine.connection.disconnect_all``. | ||||||
|     - Fix disconnecting #566 #1599 #605 #607 #1213 #565 |     - Fix disconnecting. #566 #1599 #605 #607 #1213 #565 | ||||||
|     - Improve documentation of `connect`/`disconnect` |     - Improve documentation of ``connect``/``disconnect``. | ||||||
|     - Fix issue when using multiple connections to the same mongo with different credentials #2047 |     - Fix issue when using multiple connections to the same mongo with different credentials. #2047 | ||||||
|     - `connect` fails immediately when db name contains invalid characters #2031 #1718 |     - ``connect`` fails immediately when db name contains invalid characters. #2031 #1718 | ||||||
| - Fix the default write concern of `Document.save` that was overwriting the connection write concern #568 | - Fix the default write concern of ``Document.save`` that was overwriting the connection write concern. #568 | ||||||
| - Fix querying on `List(EmbeddedDocument)` subclasses fields #1961 #1492 | - Fix querying on ``List(EmbeddedDocument)`` subclasses fields. #1961 #1492 | ||||||
| - Fix querying on `(Generic)EmbeddedDocument` subclasses fields #475 | - Fix querying on ``(Generic)EmbeddedDocument`` subclasses fields. #475 | ||||||
| - Fix `QuerySet.aggregate` so that it takes limit and skip value into account #2029 | - Fix ``QuerySet.aggregate`` so that it takes limit and skip value into account. #2029 | ||||||
| - Generate unique indices for `SortedListField` and `EmbeddedDocumentListFields` #2020 | - Generate unique indices for ``SortedListField`` and ``EmbeddedDocumentListFields``. #2020 | ||||||
| - BREAKING CHANGE: Changed the behavior of a custom field validator (i.e `validation` parameter of a `Field`). It is now expected to raise a `ValidationError` instead of returning True/False #2050 | - BREAKING CHANGE: Changed the behavior of a custom field validator (i.e ``validation`` parameter of a ``Field``). It is now expected to raise a ``ValidationError`` instead of returning ``True``/``False``. #2050 | ||||||
| - BREAKING CHANGES (associated with connect/disconnect fixes): | - BREAKING CHANGES (associated with connection/disconnection fixes): | ||||||
|     - Calling `connect` 2 times with the same alias and different parameter will raise an error (should call `disconnect` first). |     - Calling ``connect`` 2 times with the same alias and different parameter will raise an error (should call ``disconnect`` first). | ||||||
|     - `disconnect` now clears `mongoengine.connection._connection_settings`. |     - ``disconnect`` now clears ``mongoengine.connection._connection_settings``. | ||||||
|     - `disconnect` now clears the cached attribute `Document._collection`. |     - ``disconnect`` now clears the cached attribute ``Document._collection``. | ||||||
| - BREAKING CHANGE: `EmbeddedDocument.save` & `.reload` is no longier exist #1552 | - BREAKING CHANGE: ``EmbeddedDocument.save`` & ``.reload`` no longer exist. #1552 | ||||||
|  |  | ||||||
| Changes in 0.17.0 | Changes in 0.17.0 | ||||||
| ================= | ================= | ||||||
| - Fix .only() working improperly after using .count() of the same instance of QuerySet | - POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (``_cls``, ``_id``) when using ``QuerySet.as_pymongo``. #1976 | ||||||
| - Fix batch_size that was not copied when cloning a queryset object #2011 | - Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time. #1995 | ||||||
| - POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (_cls, _id) when using `QuerySet.as_pymongo` #1976 | - DEPRECATION: ``EmbeddedDocument.save`` & ``.reload`` are marked as deprecated and will be removed in a next version of MongoEngine. #1552 | ||||||
| - Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time (#1995) | - Fix ``QuerySet.only`` working improperly after using ``QuerySet.count`` of the same instance of a ``QuerySet``. | ||||||
| - Fix InvalidStringData error when using modify on a BinaryField #1127 | - Fix ``batch_size`` that was not copied when cloning a ``QuerySet`` object. #2011 | ||||||
| - DEPRECATION: `EmbeddedDocument.save` & `.reload` are marked as deprecated and will be removed in a next version of mongoengine #1552 | - Fix ``InvalidStringData`` error when using ``modify`` on a ``BinaryField``. #1127 | ||||||
| - Fix test suite and CI to support MongoDB 3.4 #1445 | - Fix test suite and CI to support MongoDB v3.4. #1445 | ||||||
| - Fix reference fields querying the database on each access if value contains orphan DBRefs | - Fix reference fields querying the database on each access if value contains orphan DBRefs. | ||||||
|  |  | ||||||
| ================= |  | ||||||
| Changes in 0.16.3 | Changes in 0.16.3 | ||||||
| ================= | ================= | ||||||
| - Fix $push with $position operator not working with lists in embedded document #1965 | - Fix ``$push`` with the ``$position`` operator not working with lists in embedded documents. #1965 | ||||||
|  |  | ||||||
| ================= |  | ||||||
| Changes in 0.16.2 | Changes in 0.16.2 | ||||||
| ================= | ================= | ||||||
| - Fix .save() that fails when called with write_concern=None (regression of 0.16.1) #1958 | - Fix ``Document.save`` that fails when called with ``write_concern=None`` (regression of 0.16.1). #1958 | ||||||
|  |  | ||||||
| ================= |  | ||||||
| Changes in 0.16.1 | Changes in 0.16.1 | ||||||
| ================= | ================= | ||||||
| - Fix `_cls` that is not set properly in Document constructor (regression) #1950 | - Fix ``_cls`` that is not set properly in the ``Document`` constructor (regression). #1950 | ||||||
| - Fix bug in _delta method - Update of a ListField depends on an unrelated dynamic field update #1733 | - Fix a bug in the ``_delta`` method - update of a ``ListField`` depends on an unrelated dynamic field update. #1733 | ||||||
| - Remove deprecated `save()` method and used `insert_one()` #1899 | - Remove PyMongo's deprecated ``Collection.save`` method and use ``Collection.insert_one`` instead. #1899 | ||||||
|  |  | ||||||
| ================= |  | ||||||
| Changes in 0.16.0 | Changes in 0.16.0 | ||||||
| ================= | ================= | ||||||
| - Various improvements to the doc |  | ||||||
| - Improvement to code quality |  | ||||||
| - POTENTIAL BREAKING CHANGES: | - POTENTIAL BREAKING CHANGES: | ||||||
|     - EmbeddedDocumentField will no longer accept references to Document classes in its constructor #1661 |     - ``EmbeddedDocumentField`` will no longer accept references to Document classes in its constructor. #1661 | ||||||
|     - Get rid of the `basecls` parameter from the DictField constructor (dead code) #1876 |     - Get rid of the ``basecls`` parameter from the ``DictField`` constructor (dead code). #1876 | ||||||
|     - default value of ComplexDateTime is now None (and no longer the current datetime) #1368 |     - Default value of the ``ComplexDateTime`` field is now ``None`` (and no longer the current datetime). #1368 | ||||||
| - Fix unhashable TypeError when referencing a Document with a compound key in an EmbeddedDocument #1685 | - Fix an unhashable ``TypeError`` when referencing a ``Document`` with a compound key in an ``EmbeddedDocument``. #1685 | ||||||
| - Fix bug where an EmbeddedDocument with the same id as its parent would not be tracked for changes #1768 | - Fix a bug where an ``EmbeddedDocument`` with the same id as its parent would not be tracked for changes. #1768 | ||||||
| - Fix the fact that bulk `insert()` was not setting primary keys of inserted documents instances #1919 | - Fix the fact that a bulk ``QuerySet.insert`` was not setting primary keys of inserted document instances. #1919 | ||||||
| - Fix bug when referencing the abstract class in a ReferenceField #1920 | - Fix a bug when referencing an abstract class in a ``ReferenceField``. #1920 | ||||||
| - Allow modification to the document made in pre_save_post_validation to be taken into account #1202 | - Allow modifications to the document made in ``pre_save_post_validation`` to be taken into account. #1202 | ||||||
| - Replaced MongoDB 2.4 tests in CI by MongoDB 3.2 #1903 | - Replace MongoDB v2.4 tests in Travis CI with MongoDB v3.2. #1903 | ||||||
| - Fix side effects of using queryset.`no_dereference` on other documents #1677 | - Fix side effects of using ``QuerySet.no_dereference`` on other documents. #1677 | ||||||
| - Fix TypeError when using lazy django translation objects as translated choices #1879 | - Fix ``TypeError`` when using lazy Django translation objects as translated choices. #1879 | ||||||
| - Improve 2-3 codebase compatibility #1889 | - Improve Python 2-3 codebase compatibility. #1889 | ||||||
| - Fix the support for changing the default value of ComplexDateTime #1368 | - Fix support for changing the default value of the ``ComplexDateTime`` field. #1368 | ||||||
| - Improves error message in case an EmbeddedDocumentListField receives an EmbeddedDocument instance | - Improve error message in case an ``EmbeddedDocumentListField`` receives an ``EmbeddedDocument`` instance instead of a list. #1877 | ||||||
|     instead of a list #1877 | - Fix the ``inc`` and ``dec`` operators for the ``DecimalField``. #1517 #1320 | ||||||
| - Fix the Decimal operator inc/dec #1517 #1320 | - Ignore ``killcursors`` queries in ``query_counter`` context manager. #1869 | ||||||
| - Ignore killcursors queries in `query_counter` context manager #1869 | - Fix the fact that ``query_counter`` was modifying the initial profiling level in case it was != 0. #1870 | ||||||
| - Fix the fact that `query_counter` was modifying the initial profiling_level in case it was != 0 #1870 | - Repair the ``no_sub_classes`` context manager + fix the fact that it was swallowing exceptions. #1865 | ||||||
| - Repaired the `no_sub_classes` context manager + fix the fact that it was swallowing exceptions #1865 | - Fix index creation error that was swallowed by ``hasattr`` under Python 2. #1688 | ||||||
| - Fix index creation error that was swallowed by hasattr under python2 #1688 | - ``QuerySet.limit`` function behaviour: Passing 0 as parameter will return all the documents in the cursor. #1611 | ||||||
| - QuerySet limit function behaviour: Passing 0 as parameter will return all the documents in the cursor #1611 | - Bulk insert updates the IDs of the input documents instances. #1919 | ||||||
| - bulk insert updates the ids of the input documents instances #1919 | - Fix a harmless bug related to ``GenericReferenceField`` where modifications in the generic-referenced document were tracked in the parent. #1934 | ||||||
| - Fix an harmless bug related to GenericReferenceField where modifications in the generic-referenced document | - Improve validation of the ``BinaryField``. #273 | ||||||
|     were tracked in the parent #1934 | - Implement lazy regex compiling in Field classes to improve ``import mongoengine`` performance. #1806 | ||||||
| - Improve validator of BinaryField #273 | - Update ``GridFSProxy.__str__``  so that it would always print both the filename and grid_id. #710 | ||||||
| - Implemented lazy regex compiling in Field classes to improve 'import mongoengine' performance #1806 | - Add ``__repr__`` to ``Q`` and ``QCombination`` classes. #1843 | ||||||
| - Updated GridFSProxy.__str__  so that it would always print both the filename and grid_id #710 | - Fix bug in the ``BaseList.__iter__`` operator (was occuring when modifying a BaseList while iterating over it). #1676 | ||||||
| - Add __repr__ to Q and QCombination #1843 | - Add a ``DateField``. #513 | ||||||
- fix bug in BaseList.__iter__ operator (was occurring when modifying a BaseList while iterating over it) #1676 | - Fix bug in the ``BaseList.__iter__`` operator (was occurring when modifying a BaseList while iterating over it). #1676 | ||||||
| - Added field `DateField`#513 | - Various code quality improvements. | ||||||
|  |  | ||||||
| Changes in 0.15.3 | Changes in 0.15.3 | ||||||
| ================= | ================= | ||||||
| -  BREAKING CHANGES: `Queryset.update/update_one` methods now returns an UpdateResult when `full_result=True` is provided and no longer a dict (relates to #1491) | - ``Queryset.update/update_one`` methods now return an ``UpdateResult`` when ``full_result=True`` is provided and no longer a dict. #1491 | ||||||
| -  Subfield resolve error in generic_emdedded_document query #1651 #1652 | - Improve ``LazyReferenceField`` and ``GenericLazyReferenceField`` with nested fields. #1704 | ||||||
| -  use each modifier only with $position #1673 #1675 | - Fix the subfield resolve error in ``generic_emdedded_document`` query. #1651 #1652 | ||||||
| -  Improve LazyReferenceField and GenericLazyReferenceField with nested fields #1704 | - Use each modifier only with ``$position``. #1673 #1675 | ||||||
| -  Fix validation error instance in GenericEmbeddedDocumentField #1067 | - Fix validation errors in the ``GenericEmbeddedDocumentField``. #1067 | ||||||
| -  Update cached fields when fields argument is given #1712 | - Update cached fields when a ``fields`` argument is given. #1712 | ||||||
| -  Add a db parameter to register_connection for compatibility with connect | - Add a ``db`` parameter to ``register_connection`` for compatibility with ``connect``. | ||||||
| -  Use insert_one, insert_many in Document.insert #1491 | - Use PyMongo v3.x's ``insert_one`` and ``insert_many`` in ``Document.insert``. #1491 | ||||||
| -  Use new update_one, update_many on document/queryset update #1491 | - Use PyMongo v3.x's ``update_one`` and ``update_many`` in ``Document.update`` and ``QuerySet.update``. #1491 | ||||||
| -  Use insert_one, insert_many in Document.insert #1491 | - Fix how ``reload(fields)`` affects changed fields. #1371 | ||||||
| -  Fix reload(fields) affect changed fields #1371 | - Fix a bug where the read-only access to the database fails when trying to create indexes. #1338 | ||||||
| -  Fix Read-only access to database fails when trying to create indexes #1338 |  | ||||||
|  |  | ||||||
| Changes in 0.15.0 | Changes in 0.15.0 | ||||||
| ================= | ================= | ||||||
| - Add LazyReferenceField and GenericLazyReferenceField to address #1230 | - Add ``LazyReferenceField`` and ``GenericLazyReferenceField``. #1230 | ||||||
|  |  | ||||||
| Changes in 0.14.1 | Changes in 0.14.1 | ||||||
| ================= | ================= | ||||||
| - Removed SemiStrictDict and started using a regular dict for `BaseDocument._data` #1630 | - Remove ``SemiStrictDict`` and start using a regular dict for ``BaseDocument._data``. #1630 | ||||||
| - Added support for the `$position` param in the `$push` operator #1566 | - Add support for the ``$position`` param in the ``$push`` operator. #1566 | ||||||
| - Fixed `DateTimeField` interpreting an empty string as today #1533 | - Fix ``DateTimeField`` interpreting an empty string as today. #1533 | ||||||
| - Added a missing `__ne__` method to the `GridFSProxy` class #1632 | - Add a missing ``__ne__`` method to the ``GridFSProxy`` class. #1632 | ||||||
| - Fixed `BaseQuerySet._fields_to_db_fields` #1553 | - Fix ``BaseQuerySet._fields_to_db_fields``. #1553 | ||||||
|  |  | ||||||
| Changes in 0.14.0 | Changes in 0.14.0 | ||||||
| ================= | ================= | ||||||
| - BREAKING CHANGE: Removed the `coerce_types` param from `QuerySet.as_pymongo` #1549 | - BREAKING CHANGE: Remove the ``coerce_types`` param from ``QuerySet.as_pymongo``. #1549 | ||||||
| - POTENTIAL BREAKING CHANGE: Made EmbeddedDocument not hashable by default #1528 | - POTENTIAL BREAKING CHANGE: Make ``EmbeddedDocument`` not hashable by default. #1528 | ||||||
| - Improved code quality #1531, #1540, #1541, #1547 | - Improve code quality. #1531, #1540, #1541, #1547 | ||||||
|  |  | ||||||
| Changes in 0.13.0 | Changes in 0.13.0 | ||||||
| ================= | ================= | ||||||
| - POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see | - POTENTIAL BREAKING CHANGE: Added Unicode support to the ``EmailField``, see docs/upgrade.rst for details. | ||||||
|   docs/upgrade.rst for details. |  | ||||||
|  |  | ||||||
| Changes in 0.12.0 | Changes in 0.12.0 | ||||||
| ================= | ================= | ||||||
| - POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476 | - POTENTIAL BREAKING CHANGE: Fix ``limit``/``skip``/``hint``/``batch_size`` chaining. #1476 | ||||||
| - POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476 | - POTENTIAL BREAKING CHANGE: Change a public ``QuerySet.clone_into`` method to a private ``QuerySet._clone_into``. #1476 | ||||||
| - Fixed the way `Document.objects.create` works with duplicate IDs #1485 | - Fix the way ``Document.objects.create`` works with duplicate IDs. #1485 | ||||||
| - Fixed connecting to a replica set with PyMongo 2.x #1436 | - Fix connecting to a replica set with PyMongo 2.x. #1436 | ||||||
| - Fixed using sets in field choices #1481 | - Fix using sets in field choices. #1481 | ||||||
| - Fixed deleting items from a `ListField` #1318 | - Fix deleting items from a ``ListField``. #1318 | ||||||
| - Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237 | - Fix an obscure error message when filtering by ``field__in=non_iterable``. #1237 | ||||||
| - Fixed behavior of a `dec` update operator #1450 | - Fix behavior of a ``dec`` update operator. #1450 | ||||||
| - Added a `rename` update operator #1454 | - Add a ``rename`` update operator. #1454 | ||||||
| - Added validation for the `db_field` parameter #1448 | - Add validation for the ``db_field`` parameter. #1448 | ||||||
| - Fixed the error message displayed when querying an `EmbeddedDocumentField` by an invalid value #1440 | - Fix the error message displayed when querying an ``EmbeddedDocumentField`` by an invalid value. #1440 | ||||||
| - Fixed the error message displayed when validating unicode URLs #1486 | - Fix the error message displayed when validating Unicode URLs. #1486 | ||||||
| - Raise an error when trying to save an abstract document #1449 | - Raise an error when trying to save an abstract document. #1449 | ||||||
|  |  | ||||||
| Changes in 0.11.0 | Changes in 0.11.0 | ||||||
| ================= | ================= | ||||||
| - BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428 | - BREAKING CHANGE: Rename ``ConnectionError`` to ``MongoEngineConnectionError`` since the former is a built-in exception name in Python v3.x. #1428 | ||||||
| - BREAKING CHANGE: Dropped Python 2.6 support. #1428 | - BREAKING CHANGE: Drop Python v2.6 support. #1428 | ||||||
| - BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428 | - BREAKING CHANGE: ``from mongoengine.base import ErrorClass`` won't work anymore for any error from ``mongoengine.errors`` (e.g. ``ValidationError``). Use ``from mongoengine.errors import ErrorClass instead``. #1428 | ||||||
| - BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334 | - BREAKING CHANGE: Accessing a broken reference will raise a ``DoesNotExist`` error. In the past it used to return ``None``. #1334 | ||||||
| - Fixed absent rounding for DecimalField when `force_string` is set. #1103 | - Fix absent rounding for the ``DecimalField`` when ``force_string`` is set. #1103 | ||||||
|  |  | ||||||
| Changes in 0.10.8 | Changes in 0.10.8 | ||||||
| ================= | ================= | ||||||
| - Added support for QuerySet.batch_size (#1426) | - Add support for ``QuerySet.batch_size``. (#1426) | ||||||
| - Fixed query set iteration within iteration #1427 | - Fix a query set iteration within an iteration. #1427 | ||||||
| - Fixed an issue where specifying a MongoDB URI host would override more information than it should #1421 | - Fix an issue where specifying a MongoDB URI host would override more information than it should. #1421 | ||||||
| - Added ability to filter the generic reference field by ObjectId and DBRef #1425 | - Add an ability to filter the ``GenericReferenceField`` by an ``ObjectId`` and a ``DBRef``. #1425 | ||||||
| - Fixed delete cascade for models with a custom primary key field #1247 | - Fix cascading deletes for models with a custom primary key field. #1247 | ||||||
| - Added ability to specify an authentication mechanism (e.g. X.509) #1333 | - Add ability to specify an authentication mechanism (e.g. X.509). #1333 | ||||||
| - Added support for falsey primary keys (e.g. doc.pk = 0) #1354 | - Add support for falsy primary keys (e.g. ``doc.pk = 0``). #1354 | ||||||
| - Fixed QuerySet#sum/average for fields w/ explicit db_field #1417 | - Fix ``QuerySet.sum/average`` for fields w/ an explicit ``db_field``. #1417 | ||||||
| - Fixed filtering by embedded_doc=None #1422 | - Fix filtering by ``embedded_doc=None``. #1422 | ||||||
| - Added support for cursor.comment #1420 | - Add support for ``Cursor.comment``. #1420 | ||||||
| - Fixed doc.get_<field>_display #1419 | - Fix ``doc.get_<field>_display`` methods. #1419 | ||||||
| - Fixed __repr__ method of the StrictDict #1424 | - Fix the ``__repr__`` method of the ``StrictDict`` #1424 | ||||||
| - Added a deprecation warning for Python 2.6 | - Add a deprecation warning for Python v2.6. | ||||||
|  |  | ||||||
| Changes in 0.10.7 | Changes in 0.10.7 | ||||||
| ================= | ================= | ||||||
| - Dropped Python 3.2 support #1390 | - Drop Python 3.2 support #1390 | ||||||
| - Fixed the bug where dynamic doc has index inside a dict field #1278 | - Fix a bug where a dynamic doc has an index inside a dict field. #1278 | ||||||
| - Fixed: ListField minus index assignment does not work #1128 | - Fix: ``ListField`` minus index assignment does not work. #1128 | ||||||
| - Fixed cascade delete mixing among collections #1224 | - Fix cascade delete mixing among collections. #1224 | ||||||
| - Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls #1206 | - Add ``signal_kwargs`` argument to ``Document.save``, ``Document.delete`` and ``BaseQuerySet.insert`` to be passed to signals calls. #1206 | ||||||
| - Raise `OperationError` when trying to do a `drop_collection` on document with no collection set. | - Raise ``OperationError`` when trying to do a ``drop_collection`` on document with no collection set. | ||||||
| - count on ListField of EmbeddedDocumentField fails. #1187 | - Fix a bug where a count on ``ListField`` of ``EmbeddedDocumentField`` fails. #1187 | ||||||
| - Fixed long fields stored as int32 in Python 3. #1253 | - Fix ``LongField`` values stored as int32 in Python 3. #1253 | ||||||
| - MapField now handles unicodes keys correctly. #1267 | - ``MapField`` now handles unicode keys correctly. #1267 | ||||||
- ListField now handles negative indices correctly. #1270 | - ``ListField`` now handles negative indices correctly. #1270 | ||||||
| - Fixed AttributeError when initializing EmbeddedDocument with positional args. #681 | - Fix an ``AttributeError`` when initializing an ``EmbeddedDocument`` with positional args. #681 | ||||||
| - Fixed no_cursor_timeout error with pymongo 3.0+ #1304 | - Fix a ``no_cursor_timeout`` error with PyMongo v3.x. #1304 | ||||||
| - Replaced map-reduce based QuerySet.sum/average with aggregation-based implementations #1336 | - Replace map-reduce based ``QuerySet.sum/average`` with aggregation-based implementations. #1336 | ||||||
| - Fixed support for `__` to escape field names that match operators names in `update` #1351 | - Fix support for ``__`` to escape field names that match operators' names in ``update``. #1351 | ||||||
| - Fixed BaseDocument#_mark_as_changed #1369 | - Fix ``BaseDocument._mark_as_changed``. #1369 | ||||||
| - Added support for pickling QuerySet instances. #1397 | - Add support for pickling ``QuerySet`` instances. #1397 | ||||||
| - Fixed connecting to a list of hosts #1389 | - Fix connecting to a list of hosts. #1389 | ||||||
| - Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334 | - Fix a bug where accessing broken references wouldn't raise a ``DoesNotExist`` error. #1334 | ||||||
| - Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218 | - Fix not being able to specify ``use_db_field=False`` on ``ListField(EmbeddedDocumentField)`` instances. #1218 | ||||||
| - Improvements to the dictionary fields docs #1383 | - Improvements to the dictionary field's docs. #1383 | ||||||
|  |  | ||||||
| Changes in 0.10.6 | Changes in 0.10.6 | ||||||
| ================= | ================= | ||||||
| - Add support for mocking MongoEngine based on mongomock. #1151 | - Add support for mocking MongoEngine based on mongomock. #1151 | ||||||
| - Fixed not being able to run tests on Windows. #1153 | - Fix not being able to run tests on Windows. #1153 | ||||||
| - Allow creation of sparse compound indexes. #1114 | - Allow creation of sparse compound indexes. #1114 | ||||||
| - count on ListField of EmbeddedDocumentField fails. #1187 |  | ||||||
|  |  | ||||||
| Changes in 0.10.5 | Changes in 0.10.5 | ||||||
| ================= | ================= | ||||||
| @@ -208,12 +264,12 @@ Changes in 0.10.5 | |||||||
|  |  | ||||||
| Changes in 0.10.4 | Changes in 0.10.4 | ||||||
| ================= | ================= | ||||||
| - SaveConditionError is now importable from the top level package. #1165 | - ``SaveConditionError`` is now importable from the top level package. #1165 | ||||||
| - upsert_one method added. #1157 | - Add a ``QuerySet.upsert_one`` method. #1157 | ||||||
|  |  | ||||||
| Changes in 0.10.3 | Changes in 0.10.3 | ||||||
| ================= | ================= | ||||||
| - Fix `read_preference` (it had chaining issues with PyMongo 2.x and it didn't work at all with PyMongo 3.x) #1042 | - Fix ``read_preference`` (it had chaining issues with PyMongo v2.x and it didn't work at all with PyMongo v3.x). #1042 | ||||||
|  |  | ||||||
| Changes in 0.10.2 | Changes in 0.10.2 | ||||||
| ================= | ================= | ||||||
| @@ -223,16 +279,16 @@ Changes in 0.10.2 | |||||||
|  |  | ||||||
| Changes in 0.10.1 | Changes in 0.10.1 | ||||||
| ================= | ================= | ||||||
| - Fix infinite recursion with CASCADE delete rules under specific conditions. #1046 | - Fix infinite recursion with cascade delete rules under specific conditions. #1046 | ||||||
| - Fix CachedReferenceField bug when loading cached docs as DBRef but failing to save them. #1047 | - Fix ``CachedReferenceField`` bug when loading cached docs as ``DBRef`` but failing to save them. #1047 | ||||||
| - Fix ignored chained options #842 | - Fix ignored chained options. #842 | ||||||
| - Document save's save_condition error raises `SaveConditionError` exception #1070 | - ``Document.save``'s ``save_condition`` error raises a ``SaveConditionError`` exception. #1070 | ||||||
| - Fix Document.reload for DynamicDocument. #1050 | - Fix ``Document.reload`` for the ``DynamicDocument``. #1050 | ||||||
| - StrictDict & SemiStrictDict are shadowed at init time. #1105 | - ``StrictDict`` & ``SemiStrictDict`` are shadowed at init time. #1105 | ||||||
| - Fix ListField minus index assignment does not work. #1119 | - Fix ``ListField`` negative index assignment not working. #1119 | ||||||
| - Remove code that marks field as changed when the field has default but not existed in database #1126 | - Remove code that marks a field as changed when the field has a default value but does not exist in the database. #1126 | ||||||
| - Remove test dependencies (nose and rednose) from install dependencies list. #1079 | - Remove test dependencies (nose and rednose) from install dependencies. #1079 | ||||||
| - Recursively build query when using elemMatch operator. #1130 | - Recursively build a query when using the ``elemMatch`` operator. #1130 | ||||||
| - Fix instance back references for lists of embedded documents. #1131 | - Fix instance back references for lists of embedded documents. #1131 | ||||||
|  |  | ||||||
| Changes in 0.10.0 | Changes in 0.10.0 | ||||||
| @@ -243,7 +299,7 @@ Changes in 0.10.0 | |||||||
| - Removed get_or_create() deprecated since 0.8.0. #300 | - Removed get_or_create() deprecated since 0.8.0. #300 | ||||||
| - Improve Document._created status when switch collection and db #1020 | - Improve Document._created status when switch collection and db #1020 | ||||||
| - Queryset update doesn't go through field validation #453 | - Queryset update doesn't go through field validation #453 | ||||||
| - Added support for specifying authentication source as option `authSource` in URI. #967 | - Added support for specifying authentication source as option ``authSource`` in URI. #967 | ||||||
| - Fixed mark_as_changed to handle higher/lower level fields changed. #927 | - Fixed mark_as_changed to handle higher/lower level fields changed. #927 | ||||||
| - ListField of embedded docs doesn't set the _instance attribute when iterating over it #914 | - ListField of embedded docs doesn't set the _instance attribute when iterating over it #914 | ||||||
| - Support += and *= for ListField #595 | - Support += and *= for ListField #595 | ||||||
| @@ -259,7 +315,7 @@ Changes in 0.10.0 | |||||||
| - Fixes some internal _id handling issue. #961 | - Fixes some internal _id handling issue. #961 | ||||||
| - Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652 | - Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652 | ||||||
| - Capped collection multiple of 256. #1011 | - Capped collection multiple of 256. #1011 | ||||||
| - Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods. | - Added ``BaseQuerySet.aggregate_sum`` and ``BaseQuerySet.aggregate_average`` methods. | ||||||
| - Fix for delete with write_concern {'w': 0}. #1008 | - Fix for delete with write_concern {'w': 0}. #1008 | ||||||
| - Allow dynamic lookup for more than two parts. #882 | - Allow dynamic lookup for more than two parts. #882 | ||||||
| - Added support for min_distance on geo queries. #831 | - Added support for min_distance on geo queries. #831 | ||||||
| @@ -268,10 +324,10 @@ Changes in 0.10.0 | |||||||
| Changes in 0.9.0 | Changes in 0.9.0 | ||||||
| ================ | ================ | ||||||
| - Update FileField when creating a new file #714 | - Update FileField when creating a new file #714 | ||||||
| - Added `EmbeddedDocumentListField` for Lists of Embedded Documents. #826 | - Added ``EmbeddedDocumentListField`` for Lists of Embedded Documents. #826 | ||||||
| - ComplexDateTimeField should fall back to None when null=True #864 | - ComplexDateTimeField should fall back to None when null=True #864 | ||||||
| - Request Support for $min, $max Field update operators #863 | - Request Support for $min, $max Field update operators #863 | ||||||
| - `BaseDict` does not follow `setdefault` #866 | - ``BaseDict`` does not follow ``setdefault`` #866 | ||||||
| - Add support for $type operator # 766 | - Add support for $type operator # 766 | ||||||
| - Fix tests for pymongo 2.8+ #877 | - Fix tests for pymongo 2.8+ #877 | ||||||
| - No module named 'django.utils.importlib' (Django dev) #872 | - No module named 'django.utils.importlib' (Django dev) #872 | ||||||
| @@ -292,13 +348,13 @@ Changes in 0.9.0 | |||||||
| - Stop ensure_indexes running on a secondaries unless connection is through mongos #746 | - Stop ensure_indexes running on a secondaries unless connection is through mongos #746 | ||||||
| - Not overriding default values when loading a subset of fields #399 | - Not overriding default values when loading a subset of fields #399 | ||||||
| - Saving document doesn't create new fields in existing collection #620 | - Saving document doesn't create new fields in existing collection #620 | ||||||
| - Added `Queryset.aggregate` wrapper to aggregation framework #703 | - Added ``Queryset.aggregate`` wrapper to aggregation framework #703 | ||||||
| - Added support to show original model fields on to_json calls instead of db_field #697 | - Added support to show original model fields on to_json calls instead of db_field #697 | ||||||
- Added Queryset.search_text to Text indexes searches #700 | - Added Queryset.search_text to Text indexes searches #700 | ||||||
| - Fixed tests for Django 1.7 #696 | - Fixed tests for Django 1.7 #696 | ||||||
| - Follow ReferenceFields in EmbeddedDocuments with select_related #690 | - Follow ReferenceFields in EmbeddedDocuments with select_related #690 | ||||||
| - Added preliminary support for text indexes #680 | - Added preliminary support for text indexes #680 | ||||||
| - Added `elemMatch` operator as well - `match` is too obscure #653 | - Added ``elemMatch`` operator as well - ``match`` is too obscure #653 | ||||||
| - Added support for progressive JPEG #486 #548 | - Added support for progressive JPEG #486 #548 | ||||||
| - Allow strings to be used in index creation #675 | - Allow strings to be used in index creation #675 | ||||||
| - Fixed EmbeddedDoc weakref proxy issue #592 | - Fixed EmbeddedDoc weakref proxy issue #592 | ||||||
| @@ -334,11 +390,11 @@ Changes in 0.9.0 | |||||||
| - Increase email field length to accommodate new TLDs #726 | - Increase email field length to accommodate new TLDs #726 | ||||||
| - index_cls is ignored when deciding to set _cls as index prefix #733 | - index_cls is ignored when deciding to set _cls as index prefix #733 | ||||||
| - Make 'db' argument to connection optional #737 | - Make 'db' argument to connection optional #737 | ||||||
| - Allow atomic update for the entire `DictField` #742 | - Allow atomic update for the entire ``DictField`` #742 | ||||||
| - Added MultiPointField, MultiLineField, MultiPolygonField | - Added MultiPointField, MultiLineField, MultiPolygonField | ||||||
| - Fix multiple connections aliases being rewritten #748 | - Fix multiple connections aliases being rewritten #748 | ||||||
| - Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791 | - Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791 | ||||||
| - Make `in_bulk()` respect `no_dereference()` #775 | - Make ``in_bulk()`` respect ``no_dereference()`` #775 | ||||||
| - Handle None from model __str__; Fixes #753 #754 | - Handle None from model __str__; Fixes #753 #754 | ||||||
| - _get_changed_fields fix for embedded documents with id field. #925 | - _get_changed_fields fix for embedded documents with id field. #925 | ||||||
|  |  | ||||||
| @@ -392,18 +448,18 @@ Changes in 0.8.4 | |||||||
|  |  | ||||||
| Changes in 0.8.3 | Changes in 0.8.3 | ||||||
| ================ | ================ | ||||||
| - Fixed EmbeddedDocuments with `id` also storing `_id` (#402) | - Fixed EmbeddedDocuments with ``id`` also storing ``_id`` (#402) | ||||||
| - Added get_proxy_object helper to filefields (#391) | - Added get_proxy_object helper to filefields (#391) | ||||||
| - Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) | - Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) | ||||||
| - Fixed sum and average mapreduce dot notation support (#375, #376, #393) | - Fixed sum and average mapreduce dot notation support (#375, #376, #393) | ||||||
| - Fixed as_pymongo to return the id (#386) | - Fixed as_pymongo to return the id (#386) | ||||||
| - Document.select_related() now respects `db_alias` (#377) | - Document.select_related() now respects ``db_alias`` (#377) | ||||||
| - Reload uses shard_key if applicable (#384) | - Reload uses shard_key if applicable (#384) | ||||||
| - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) | - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) | ||||||
|  |  | ||||||
|   **Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3 |   **Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3 | ||||||
|  |  | ||||||
| - Fixed pickling dynamic documents `_dynamic_fields` (#387) | - Fixed pickling dynamic documents ``_dynamic_fields`` (#387) | ||||||
| - Fixed ListField setslice and delslice dirty tracking (#390) | - Fixed ListField setslice and delslice dirty tracking (#390) | ||||||
| - Added Django 1.5 PY3 support (#392) | - Added Django 1.5 PY3 support (#392) | ||||||
| - Added match ($elemMatch) support for EmbeddedDocuments (#379) | - Added match ($elemMatch) support for EmbeddedDocuments (#379) | ||||||
| @@ -444,7 +500,7 @@ Changes in 0.8.0 | |||||||
| ================ | ================ | ||||||
| - Fixed querying ReferenceField custom_id (#317) | - Fixed querying ReferenceField custom_id (#317) | ||||||
| - Fixed pickle issues with collections (#316) | - Fixed pickle issues with collections (#316) | ||||||
| - Added `get_next_value` preview for SequenceFields (#319) | - Added ``get_next_value`` preview for SequenceFields (#319) | ||||||
| - Added no_sub_classes context manager and queryset helper (#312) | - Added no_sub_classes context manager and queryset helper (#312) | ||||||
| - Querysets now utilises a local cache | - Querysets now utilises a local cache | ||||||
| - Changed __len__ behaviour in the queryset (#247, #311) | - Changed __len__ behaviour in the queryset (#247, #311) | ||||||
| @@ -473,7 +529,7 @@ Changes in 0.8.0 | |||||||
| - Updated connection to use MongoClient (#262, #274) | - Updated connection to use MongoClient (#262, #274) | ||||||
| - Fixed db_alias and inherited Documents (#143) | - Fixed db_alias and inherited Documents (#143) | ||||||
| - Documentation update for document errors (#124) | - Documentation update for document errors (#124) | ||||||
| - Deprecated `get_or_create` (#35) | - Deprecated ``get_or_create`` (#35) | ||||||
| - Updated inheritable objects created by upsert now contain _cls (#118) | - Updated inheritable objects created by upsert now contain _cls (#118) | ||||||
| - Added support for creating documents with embedded documents in a single operation (#6) | - Added support for creating documents with embedded documents in a single operation (#6) | ||||||
| - Added to_json and from_json to Document (#1) | - Added to_json and from_json to Document (#1) | ||||||
| @@ -594,7 +650,7 @@ Changes in 0.7.0 | |||||||
| - Fixed UnboundLocalError in composite index with pk field (#88) | - Fixed UnboundLocalError in composite index with pk field (#88) | ||||||
| - Updated ReferenceField's to optionally store ObjectId strings | - Updated ReferenceField's to optionally store ObjectId strings | ||||||
|   this will become the default in 0.8 (#89) |   this will become the default in 0.8 (#89) | ||||||
| - Added FutureWarning - save will default to `cascade=False` in 0.8 | - Added FutureWarning - save will default to ``cascade=False`` in 0.8 | ||||||
| - Added example of indexing embedded document fields (#75) | - Added example of indexing embedded document fields (#75) | ||||||
| - Fixed ImageField resizing when forcing size (#80) | - Fixed ImageField resizing when forcing size (#80) | ||||||
| - Add flexibility for fields handling bad data (#78) | - Add flexibility for fields handling bad data (#78) | ||||||
| @@ -690,7 +746,7 @@ Changes in 0.6.8 | |||||||
| ================ | ================ | ||||||
| - Fixed FileField losing reference when no default set | - Fixed FileField losing reference when no default set | ||||||
| - Removed possible race condition from FileField (grid_file) | - Removed possible race condition from FileField (grid_file) | ||||||
| - Added assignment to save, can now do: `b = MyDoc(**kwargs).save()` | - Added assignment to save, can now do: ``b = MyDoc(**kwargs).save()`` | ||||||
| - Added support for pull operations on nested EmbeddedDocuments | - Added support for pull operations on nested EmbeddedDocuments | ||||||
| - Added support for choices with GenericReferenceFields | - Added support for choices with GenericReferenceFields | ||||||
| - Added support for choices with GenericEmbeddedDocumentFields | - Added support for choices with GenericEmbeddedDocumentFields | ||||||
| @@ -705,7 +761,7 @@ Changes in 0.6.7 | |||||||
| - Fixed indexing on '_id' or 'pk' or 'id' | - Fixed indexing on '_id' or 'pk' or 'id' | ||||||
| - Invalid data from the DB now raises a InvalidDocumentError | - Invalid data from the DB now raises a InvalidDocumentError | ||||||
| - Cleaned up the Validation Error - docs and code | - Cleaned up the Validation Error - docs and code | ||||||
| - Added meta `auto_create_index` so you can disable index creation | - Added meta ``auto_create_index`` so you can disable index creation | ||||||
| - Added write concern options to inserts | - Added write concern options to inserts | ||||||
| - Fixed typo in meta for index options | - Fixed typo in meta for index options | ||||||
| - Bug fix Read preference now passed correctly | - Bug fix Read preference now passed correctly | ||||||
| @@ -746,7 +802,6 @@ Changes in 0.6.1 | |||||||
|  |  | ||||||
| Changes in 0.6 | Changes in 0.6 | ||||||
| ============== | ============== | ||||||
|  |  | ||||||
| - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7 | - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7 | ||||||
| - Added support for covered indexes when inheritance is off | - Added support for covered indexes when inheritance is off | ||||||
| - No longer always upsert on save for items with a '_id' | - No longer always upsert on save for items with a '_id' | ||||||
| @@ -971,7 +1026,6 @@ Changes in v0.1.3 | |||||||
|   querying takes place |   querying takes place | ||||||
| - A few minor bugfixes | - A few minor bugfixes | ||||||
|  |  | ||||||
|  |  | ||||||
| Changes in v0.1.2 | Changes in v0.1.2 | ||||||
| ================= | ================= | ||||||
| - Query values may be processed before before being used in queries | - Query values may be processed before before being used in queries | ||||||
| @@ -980,7 +1034,6 @@ Changes in v0.1.2 | |||||||
| - Added ``BooleanField`` | - Added ``BooleanField`` | ||||||
| - Added ``Document.reload()`` method | - Added ``Document.reload()`` method | ||||||
|  |  | ||||||
|  |  | ||||||
| Changes in v0.1.1 | Changes in v0.1.1 | ||||||
| ================= | ================= | ||||||
| - Documents may now use capped collections | - Documents may now use capped collections | ||||||
|   | |||||||
| @@ -1,16 +1,19 @@ | |||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
| connect('tumblelog') | connect("tumblelog") | ||||||
|  |  | ||||||
|  |  | ||||||
| class Comment(EmbeddedDocument): | class Comment(EmbeddedDocument): | ||||||
|     content = StringField() |     content = StringField() | ||||||
|     name = StringField(max_length=120) |     name = StringField(max_length=120) | ||||||
|  |  | ||||||
|  |  | ||||||
| class User(Document): | class User(Document): | ||||||
|     email = StringField(required=True) |     email = StringField(required=True) | ||||||
|     first_name = StringField(max_length=50) |     first_name = StringField(max_length=50) | ||||||
|     last_name = StringField(max_length=50) |     last_name = StringField(max_length=50) | ||||||
|  |  | ||||||
|  |  | ||||||
| class Post(Document): | class Post(Document): | ||||||
|     title = StringField(max_length=120, required=True) |     title = StringField(max_length=120, required=True) | ||||||
|     author = ReferenceField(User) |     author = ReferenceField(User) | ||||||
| @@ -18,54 +21,57 @@ class Post(Document): | |||||||
|     comments = ListField(EmbeddedDocumentField(Comment)) |     comments = ListField(EmbeddedDocumentField(Comment)) | ||||||
|  |  | ||||||
|     # bugfix |     # bugfix | ||||||
|     meta = {'allow_inheritance': True} |     meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |  | ||||||
| class TextPost(Post): | class TextPost(Post): | ||||||
|     content = StringField() |     content = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
| class ImagePost(Post): | class ImagePost(Post): | ||||||
|     image_path = StringField() |     image_path = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
| class LinkPost(Post): | class LinkPost(Post): | ||||||
|     link_url = StringField() |     link_url = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
| Post.drop_collection() | Post.drop_collection() | ||||||
|  |  | ||||||
| john = User(email='jdoe@example.com', first_name='John', last_name='Doe') | john = User(email="jdoe@example.com", first_name="John", last_name="Doe") | ||||||
| john.save() | john.save() | ||||||
|  |  | ||||||
| post1 = TextPost(title='Fun with MongoEngine', author=john) | post1 = TextPost(title="Fun with MongoEngine", author=john) | ||||||
| post1.content = 'Took a look at MongoEngine today, looks pretty cool.' | post1.content = "Took a look at MongoEngine today, looks pretty cool." | ||||||
| post1.tags = ['mongodb', 'mongoengine'] | post1.tags = ["mongodb", "mongoengine"] | ||||||
| post1.save() | post1.save() | ||||||
|  |  | ||||||
| post2 = LinkPost(title='MongoEngine Documentation', author=john) | post2 = LinkPost(title="MongoEngine Documentation", author=john) | ||||||
| post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' | post2.link_url = "http://tractiondigital.com/labs/mongoengine/docs" | ||||||
| post2.tags = ['mongoengine'] | post2.tags = ["mongoengine"] | ||||||
| post2.save() | post2.save() | ||||||
|  |  | ||||||
| print('ALL POSTS') | print("ALL POSTS") | ||||||
| print() | print() | ||||||
| for post in Post.objects: | for post in Post.objects: | ||||||
|     print(post.title) |     print(post.title) | ||||||
|     #print '=' * post.title.count() |     # print '=' * post.title.count() | ||||||
|     print("=" * 20) |     print("=" * 20) | ||||||
|  |  | ||||||
|     if isinstance(post, TextPost): |     if isinstance(post, TextPost): | ||||||
|         print(post.content) |         print(post.content) | ||||||
|  |  | ||||||
|     if isinstance(post, LinkPost): |     if isinstance(post, LinkPost): | ||||||
|         print('Link:', post.link_url) |         print("Link:", post.link_url) | ||||||
|  |  | ||||||
|     print() |     print() | ||||||
| print() | print() | ||||||
|  |  | ||||||
| print('POSTS TAGGED \'MONGODB\'') | print("POSTS TAGGED 'MONGODB'") | ||||||
| print() | print() | ||||||
| for post in Post.objects(tags='mongodb'): | for post in Post.objects(tags="mongodb"): | ||||||
|     print(post.title) |     print(post.title) | ||||||
| print() | print() | ||||||
|  |  | ||||||
| num_posts = Post.objects(tags='mongodb').count() | num_posts = Post.objects(tags="mongodb").count() | ||||||
| print('Found %d posts with tag "mongodb"' % num_posts) | print('Found %d posts with tag "mongodb"' % num_posts) | ||||||
|   | |||||||
							
								
								
									
										96
									
								
								docs/conf.py
									
									
									
									
									
								
							
							
						
						
									
										96
									
								
								docs/conf.py
									
									
									
									
									
								
							| @@ -11,7 +11,8 @@ | |||||||
| # All configuration values have a default; values that are commented out | # All configuration values have a default; values that are commented out | ||||||
| # serve to show the default. | # serve to show the default. | ||||||
|  |  | ||||||
| import sys, os | import os | ||||||
|  | import sys | ||||||
|  |  | ||||||
| import sphinx_rtd_theme | import sphinx_rtd_theme | ||||||
|  |  | ||||||
| @@ -20,29 +21,29 @@ import mongoengine | |||||||
| # If extensions (or modules to document with autodoc) are in another directory, | # If extensions (or modules to document with autodoc) are in another directory, | ||||||
| # add these directories to sys.path here. If the directory is relative to the | # add these directories to sys.path here. If the directory is relative to the | ||||||
| # documentation root, use os.path.abspath to make it absolute, like shown here. | # documentation root, use os.path.abspath to make it absolute, like shown here. | ||||||
| sys.path.insert(0, os.path.abspath('..')) | sys.path.insert(0, os.path.abspath("..")) | ||||||
|  |  | ||||||
| # -- General configuration ----------------------------------------------------- | # -- General configuration ----------------------------------------------------- | ||||||
|  |  | ||||||
| # Add any Sphinx extension module names here, as strings. They can be extensions | # Add any Sphinx extension module names here, as strings. They can be extensions | ||||||
| # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. | ||||||
| extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo'] | extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo"] | ||||||
|  |  | ||||||
| # Add any paths that contain templates here, relative to this directory. | # Add any paths that contain templates here, relative to this directory. | ||||||
| templates_path = ['_templates'] | templates_path = ["_templates"] | ||||||
|  |  | ||||||
| # The suffix of source filenames. | # The suffix of source filenames. | ||||||
| source_suffix = '.rst' | source_suffix = ".rst" | ||||||
|  |  | ||||||
| # The encoding of source files. | # The encoding of source files. | ||||||
| #source_encoding = 'utf-8' | # source_encoding = 'utf-8' | ||||||
|  |  | ||||||
| # The master toctree document. | # The master toctree document. | ||||||
| master_doc = 'index' | master_doc = "index" | ||||||
|  |  | ||||||
| # General information about the project. | # General information about the project. | ||||||
| project = u'MongoEngine' | project = u"MongoEngine" | ||||||
| copyright = u'2009, MongoEngine Authors' | copyright = u"2009, MongoEngine Authors" | ||||||
|  |  | ||||||
| # The version info for the project you're documenting, acts as replacement for | # The version info for the project you're documenting, acts as replacement for | ||||||
| # |version| and |release|, also used in various other places throughout the | # |version| and |release|, also used in various other places throughout the | ||||||
| @@ -55,68 +56,66 @@ release = mongoengine.get_version() | |||||||
|  |  | ||||||
| # The language for content autogenerated by Sphinx. Refer to documentation | # The language for content autogenerated by Sphinx. Refer to documentation | ||||||
| # for a list of supported languages. | # for a list of supported languages. | ||||||
| #language = None | # language = None | ||||||
|  |  | ||||||
| # There are two options for replacing |today|: either, you set today to some | # There are two options for replacing |today|: either, you set today to some | ||||||
| # non-false value, then it is used: | # non-false value, then it is used: | ||||||
| #today = '' | # today = '' | ||||||
| # Else, today_fmt is used as the format for a strftime call. | # Else, today_fmt is used as the format for a strftime call. | ||||||
| #today_fmt = '%B %d, %Y' | # today_fmt = '%B %d, %Y' | ||||||
|  |  | ||||||
| # List of documents that shouldn't be included in the build. | # List of documents that shouldn't be included in the build. | ||||||
| #unused_docs = [] | # unused_docs = [] | ||||||
|  |  | ||||||
| # List of directories, relative to source directory, that shouldn't be searched | # List of directories, relative to source directory, that shouldn't be searched | ||||||
| # for source files. | # for source files. | ||||||
| exclude_trees = ['_build'] | exclude_trees = ["_build"] | ||||||
|  |  | ||||||
| # The reST default role (used for this markup: `text`) to use for all documents. | # The reST default role (used for this markup: `text`) to use for all documents. | ||||||
| #default_role = None | # default_role = None | ||||||
|  |  | ||||||
| # If true, '()' will be appended to :func: etc. cross-reference text. | # If true, '()' will be appended to :func: etc. cross-reference text. | ||||||
| #add_function_parentheses = True | # add_function_parentheses = True | ||||||
|  |  | ||||||
| # If true, the current module name will be prepended to all description | # If true, the current module name will be prepended to all description | ||||||
| # unit titles (such as .. function::). | # unit titles (such as .. function::). | ||||||
| #add_module_names = True | # add_module_names = True | ||||||
|  |  | ||||||
| # If true, sectionauthor and moduleauthor directives will be shown in the | # If true, sectionauthor and moduleauthor directives will be shown in the | ||||||
| # output. They are ignored by default. | # output. They are ignored by default. | ||||||
| #show_authors = False | # show_authors = False | ||||||
|  |  | ||||||
| # The name of the Pygments (syntax highlighting) style to use. | # The name of the Pygments (syntax highlighting) style to use. | ||||||
| pygments_style = 'sphinx' | pygments_style = "sphinx" | ||||||
|  |  | ||||||
| # A list of ignored prefixes for module index sorting. | # A list of ignored prefixes for module index sorting. | ||||||
| #modindex_common_prefix = [] | # modindex_common_prefix = [] | ||||||
|  |  | ||||||
|  |  | ||||||
| # -- Options for HTML output --------------------------------------------------- | # -- Options for HTML output --------------------------------------------------- | ||||||
|  |  | ||||||
| # The theme to use for HTML and HTML Help pages.  Major themes that come with | # The theme to use for HTML and HTML Help pages.  Major themes that come with | ||||||
| # Sphinx are currently 'default' and 'sphinxdoc'. | # Sphinx are currently 'default' and 'sphinxdoc'. | ||||||
| html_theme = 'sphinx_rtd_theme' | html_theme = "sphinx_rtd_theme" | ||||||
|  |  | ||||||
| # Theme options are theme-specific and customize the look and feel of a theme | # Theme options are theme-specific and customize the look and feel of a theme | ||||||
| # further.  For a list of options available for each theme, see the | # further.  For a list of options available for each theme, see the | ||||||
| # documentation. | # documentation. | ||||||
| html_theme_options = { | html_theme_options = {"canonical_url": "http://docs.mongoengine.org/en/latest/"} | ||||||
|     'canonical_url': 'http://docs.mongoengine.org/en/latest/' |  | ||||||
| } |  | ||||||
|  |  | ||||||
| # Add any paths that contain custom themes here, relative to this directory. | # Add any paths that contain custom themes here, relative to this directory. | ||||||
| html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] | ||||||
|  |  | ||||||
| # The name for this set of Sphinx documents.  If None, it defaults to | # The name for this set of Sphinx documents.  If None, it defaults to | ||||||
| # "<project> v<release> documentation". | # "<project> v<release> documentation". | ||||||
| #html_title = None | # html_title = None | ||||||
|  |  | ||||||
| # A shorter title for the navigation bar.  Default is the same as html_title. | # A shorter title for the navigation bar.  Default is the same as html_title. | ||||||
| #html_short_title = None | # html_short_title = None | ||||||
|  |  | ||||||
| # The name of an image file (relative to this directory) to place at the top | # The name of an image file (relative to this directory) to place at the top | ||||||
| # of the sidebar. | # of the sidebar. | ||||||
| #html_logo = None | # html_logo = None | ||||||
|  |  | ||||||
| # The name of an image file (within the static path) to use as favicon of the | # The name of an image file (within the static path) to use as favicon of the | ||||||
| # docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32 | # docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32 | ||||||
| @@ -126,11 +125,11 @@ html_favicon = "favicon.ico" | |||||||
| # Add any paths that contain custom static files (such as style sheets) here, | # Add any paths that contain custom static files (such as style sheets) here, | ||||||
| # relative to this directory. They are copied after the builtin static files, | # relative to this directory. They are copied after the builtin static files, | ||||||
| # so a file named "default.css" will overwrite the builtin "default.css". | # so a file named "default.css" will overwrite the builtin "default.css". | ||||||
| #html_static_path = ['_static'] | # html_static_path = ['_static'] | ||||||
|  |  | ||||||
| # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, | ||||||
| # using the given strftime format. | # using the given strftime format. | ||||||
| #html_last_updated_fmt = '%b %d, %Y' | # html_last_updated_fmt = '%b %d, %Y' | ||||||
|  |  | ||||||
| # If true, SmartyPants will be used to convert quotes and dashes to | # If true, SmartyPants will be used to convert quotes and dashes to | ||||||
| # typographically correct entities. | # typographically correct entities. | ||||||
| @@ -138,69 +137,68 @@ html_use_smartypants = True | |||||||
|  |  | ||||||
| # Custom sidebar templates, maps document names to template names. | # Custom sidebar templates, maps document names to template names. | ||||||
| html_sidebars = { | html_sidebars = { | ||||||
|     'index': ['globaltoc.html', 'searchbox.html'], |     "index": ["globaltoc.html", "searchbox.html"], | ||||||
|     '**': ['localtoc.html', 'relations.html', 'searchbox.html'] |     "**": ["localtoc.html", "relations.html", "searchbox.html"], | ||||||
| } | } | ||||||
|  |  | ||||||
|  |  | ||||||
| # Additional templates that should be rendered to pages, maps page names to | # Additional templates that should be rendered to pages, maps page names to | ||||||
| # template names. | # template names. | ||||||
| #html_additional_pages = {} | # html_additional_pages = {} | ||||||
|  |  | ||||||
| # If false, no module index is generated. | # If false, no module index is generated. | ||||||
| #html_use_modindex = True | # html_use_modindex = True | ||||||
|  |  | ||||||
| # If false, no index is generated. | # If false, no index is generated. | ||||||
| #html_use_index = True | # html_use_index = True | ||||||
|  |  | ||||||
| # If true, the index is split into individual pages for each letter. | # If true, the index is split into individual pages for each letter. | ||||||
| #html_split_index = False | # html_split_index = False | ||||||
|  |  | ||||||
| # If true, links to the reST sources are added to the pages. | # If true, links to the reST sources are added to the pages. | ||||||
| #html_show_sourcelink = True | # html_show_sourcelink = True | ||||||
|  |  | ||||||
| # If true, an OpenSearch description file will be output, and all pages will | # If true, an OpenSearch description file will be output, and all pages will | ||||||
| # contain a <link> tag referring to it.  The value of this option must be the | # contain a <link> tag referring to it.  The value of this option must be the | ||||||
| # base URL from which the finished HTML is served. | # base URL from which the finished HTML is served. | ||||||
| #html_use_opensearch = '' | # html_use_opensearch = '' | ||||||
|  |  | ||||||
| # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). | # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). | ||||||
| #html_file_suffix = '' | # html_file_suffix = '' | ||||||
|  |  | ||||||
| # Output file base name for HTML help builder. | # Output file base name for HTML help builder. | ||||||
| htmlhelp_basename = 'MongoEnginedoc' | htmlhelp_basename = "MongoEnginedoc" | ||||||
|  |  | ||||||
|  |  | ||||||
| # -- Options for LaTeX output -------------------------------------------------- | # -- Options for LaTeX output -------------------------------------------------- | ||||||
|  |  | ||||||
| # The paper size ('letter' or 'a4'). | # The paper size ('letter' or 'a4'). | ||||||
| latex_paper_size = 'a4' | latex_paper_size = "a4" | ||||||
|  |  | ||||||
| # The font size ('10pt', '11pt' or '12pt'). | # The font size ('10pt', '11pt' or '12pt'). | ||||||
| #latex_font_size = '10pt' | # latex_font_size = '10pt' | ||||||
|  |  | ||||||
| # Grouping the document tree into LaTeX files. List of tuples | # Grouping the document tree into LaTeX files. List of tuples | ||||||
| # (source start file, target name, title, author, documentclass [howto/manual]). | # (source start file, target name, title, author, documentclass [howto/manual]). | ||||||
| latex_documents = [ | latex_documents = [ | ||||||
|   ('index', 'MongoEngine.tex', 'MongoEngine Documentation', |     ("index", "MongoEngine.tex", "MongoEngine Documentation", "Ross Lawley", "manual") | ||||||
|    'Ross Lawley', 'manual'), |  | ||||||
| ] | ] | ||||||
|  |  | ||||||
| # The name of an image file (relative to this directory) to place at the top of | # The name of an image file (relative to this directory) to place at the top of | ||||||
| # the title page. | # the title page. | ||||||
| #latex_logo = None | # latex_logo = None | ||||||
|  |  | ||||||
| # For "manual" documents, if this is true, then toplevel headings are parts, | # For "manual" documents, if this is true, then toplevel headings are parts, | ||||||
| # not chapters. | # not chapters. | ||||||
| #latex_use_parts = False | # latex_use_parts = False | ||||||
|  |  | ||||||
| # Additional stuff for the LaTeX preamble. | # Additional stuff for the LaTeX preamble. | ||||||
| #latex_preamble = '' | # latex_preamble = '' | ||||||
|  |  | ||||||
| # Documents to append as an appendix to all manuals. | # Documents to append as an appendix to all manuals. | ||||||
| #latex_appendices = [] | # latex_appendices = [] | ||||||
|  |  | ||||||
| # If false, no module index is generated. | # If false, no module index is generated. | ||||||
| #latex_use_modindex = True | # latex_use_modindex = True | ||||||
|  |  | ||||||
| autoclass_content = 'both' | autoclass_content = "both" | ||||||
|   | |||||||
| @@ -13,7 +13,7 @@ Help Wanted! | |||||||
|  |  | ||||||
| The MongoEngine team is looking for help contributing and maintaining a new | The MongoEngine team is looking for help contributing and maintaining a new | ||||||
| Django extension for MongoEngine! If you have Django experience and would like | Django extension for MongoEngine! If you have Django experience and would like | ||||||
| to help contribute to the project, please get in touch on the  | to help contribute to the project, please get in touch on the | ||||||
| `mailing list <http://groups.google.com/group/mongoengine-users>`_ or by  | `mailing list <http://groups.google.com/group/mongoengine-users>`_ or by | ||||||
| simply contributing on | simply contributing on | ||||||
| `GitHub <https://github.com/MongoEngine/django-mongoengine>`_. | `GitHub <https://github.com/MongoEngine/django-mongoengine>`_. | ||||||
|   | |||||||
							
								
								
									
										12
									
								
								docs/faq.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								docs/faq.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,12 @@ | |||||||
|  | ========================== | ||||||
|  | Frequently Asked Questions | ||||||
|  | ========================== | ||||||
|  |  | ||||||
|  | Does MongoEngine support asynchronous drivers (Motor, TxMongo)? | ||||||
|  | --------------------------------------------------------------- | ||||||
|  |  | ||||||
|  | No, MongoEngine is exclusively based on PyMongo and isn't designed to support other driver. | ||||||
|  | If this is a requirement for your project, check the alternative:  `uMongo`_ and `MotorEngine`_. | ||||||
|  |  | ||||||
|  | .. _uMongo: https://umongo.readthedocs.io/ | ||||||
|  | .. _MotorEngine: https://motorengine.readthedocs.io/ | ||||||
| @@ -86,7 +86,7 @@ using 3 different databases to store data:: | |||||||
|         connect(alias='user-db-alias', db='user-db') |         connect(alias='user-db-alias', db='user-db') | ||||||
|         connect(alias='book-db-alias', db='book-db') |         connect(alias='book-db-alias', db='book-db') | ||||||
|         connect(alias='users-books-db-alias', db='users-books-db') |         connect(alias='users-books-db-alias', db='users-books-db') | ||||||
|          |  | ||||||
|         class User(Document): |         class User(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|   | |||||||
| @@ -352,7 +352,7 @@ Its value can take any of the following constants: | |||||||
|   Deletion is denied if there still exist references to the object being |   Deletion is denied if there still exist references to the object being | ||||||
|   deleted. |   deleted. | ||||||
| :const:`mongoengine.NULLIFY` | :const:`mongoengine.NULLIFY` | ||||||
|   Any object's fields still referring to the object being deleted are removed |   Any object's fields still referring to the object being deleted are set to None | ||||||
|   (using MongoDB's "unset" operation), effectively nullifying the relationship. |   (using MongoDB's "unset" operation), effectively nullifying the relationship. | ||||||
| :const:`mongoengine.CASCADE` | :const:`mongoengine.CASCADE` | ||||||
|   Any object containing fields that are referring to the object being deleted |   Any object containing fields that are referring to the object being deleted | ||||||
| @@ -555,7 +555,6 @@ There are a few top level defaults for all indexes that can be set:: | |||||||
|             'index_background': True, |             'index_background': True, | ||||||
|             'index_cls': False, |             'index_cls': False, | ||||||
|             'auto_create_index': True, |             'auto_create_index': True, | ||||||
|             'index_drop_dups': True, |  | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -574,11 +573,6 @@ There are a few top level defaults for all indexes that can be set:: | |||||||
|     in systems where indexes are managed separately. Disabling this will improve |     in systems where indexes are managed separately. Disabling this will improve | ||||||
|     performance. |     performance. | ||||||
|  |  | ||||||
| :attr:`index_drop_dups` (Optional) |  | ||||||
|     Set the default value for if an index should drop duplicates |  | ||||||
|     Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning |  | ||||||
|     and has no effect |  | ||||||
|  |  | ||||||
|  |  | ||||||
| Compound Indexes and Indexing sub documents | Compound Indexes and Indexing sub documents | ||||||
| ------------------------------------------- | ------------------------------------------- | ||||||
| @@ -714,11 +708,16 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. :: | |||||||
| Shard keys | Shard keys | ||||||
| ========== | ========== | ||||||
|  |  | ||||||
| If your collection is sharded, then you need to specify the shard key as a tuple, | If your collection is sharded by multiple keys, then you can improve shard | ||||||
| using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`. | routing (and thus the performance of your application) by specifying the shard | ||||||
| This ensures that the shard key is sent with the query when calling the | key, using the :attr:`shard_key` attribute of | ||||||
| :meth:`~mongoengine.document.Document.save` or | :attr:`~mongoengine.Document.meta`. The shard key should be defined as a tuple. | ||||||
| :meth:`~mongoengine.document.Document.update` method on an existing |  | ||||||
|  | This ensures that the full shard key is sent with the query when calling | ||||||
|  | methods such as :meth:`~mongoengine.document.Document.save`, | ||||||
|  | :meth:`~mongoengine.document.Document.update`, | ||||||
|  | :meth:`~mongoengine.document.Document.modify`, or | ||||||
|  | :meth:`~mongoengine.document.Document.delete` on an existing | ||||||
| :class:`~mongoengine.Document` instance:: | :class:`~mongoengine.Document` instance:: | ||||||
|  |  | ||||||
|     class LogEntry(Document): |     class LogEntry(Document): | ||||||
| @@ -728,7 +727,8 @@ This ensures that the shard key is sent with the query when calling the | |||||||
|         data = StringField() |         data = StringField() | ||||||
|  |  | ||||||
|         meta = { |         meta = { | ||||||
|             'shard_key': ('machine', 'timestamp',) |             'shard_key': ('machine', 'timestamp'), | ||||||
|  |             'indexes': ('machine', 'timestamp'), | ||||||
|         } |         } | ||||||
|  |  | ||||||
| .. _document-inheritance: | .. _document-inheritance: | ||||||
| @@ -738,7 +738,7 @@ Document inheritance | |||||||
|  |  | ||||||
| To create a specialised type of a :class:`~mongoengine.Document` you have | To create a specialised type of a :class:`~mongoengine.Document` you have | ||||||
| defined, you may subclass it and add any extra fields or methods you may need. | defined, you may subclass it and add any extra fields or methods you may need. | ||||||
| As this is new class is not a direct subclass of | As this new class is not a direct subclass of | ||||||
| :class:`~mongoengine.Document`, it will not be stored in its own collection; it | :class:`~mongoengine.Document`, it will not be stored in its own collection; it | ||||||
| will use the same collection as its superclass uses. This allows for more | will use the same collection as its superclass uses. This allows for more | ||||||
| convenient and efficient retrieval of related documents -- all you need do is | convenient and efficient retrieval of related documents -- all you need do is | ||||||
| @@ -761,6 +761,27 @@ document.:: | |||||||
|           Setting :attr:`allow_inheritance` to True should also be used in |           Setting :attr:`allow_inheritance` to True should also be used in | ||||||
|           :class:`~mongoengine.EmbeddedDocument` class in case you need to subclass it |           :class:`~mongoengine.EmbeddedDocument` class in case you need to subclass it | ||||||
|  |  | ||||||
|  | When it comes to querying using :attr:`.objects()`, querying `Page.objects()` will query | ||||||
|  | both `Page` and `DatedPage` whereas querying `DatedPage` will only query the `DatedPage` documents. | ||||||
|  | Behind the scenes, MongoEngine deals with inheritance by adding a :attr:`_cls` attribute that contains | ||||||
|  | the class name in every documents. When a document is loaded, MongoEngine checks | ||||||
|  | it's :attr:`_cls` attribute and use that class to construct the instance.:: | ||||||
|  |  | ||||||
|  |     Page(title='a funky title').save() | ||||||
|  |     DatedPage(title='another title', date=datetime.utcnow()).save() | ||||||
|  |  | ||||||
|  |     print(Page.objects().count())         # 2 | ||||||
|  |     print(DatedPage.objects().count())    # 1 | ||||||
|  |  | ||||||
|  |     # print documents in their native form | ||||||
|  |     # we remove 'id' to avoid polluting the output with unnecessary detail | ||||||
|  |     qs = Page.objects.exclude('id').as_pymongo() | ||||||
|  |     print(list(qs)) | ||||||
|  |     # [ | ||||||
|  |     #   {'_cls': u 'Page', 'title': 'a funky title'}, | ||||||
|  |     #   {'_cls': u 'Page.DatedPage', 'title': u 'another title', 'date': datetime.datetime(2019, 12, 13, 20, 16, 59, 993000)} | ||||||
|  |     # ] | ||||||
|  |  | ||||||
| Working with existing data | Working with existing data | ||||||
| -------------------------- | -------------------------- | ||||||
| As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and | As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and | ||||||
|   | |||||||
| @@ -10,8 +10,9 @@ Writing | |||||||
| GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field | GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field | ||||||
| object. This field acts as a file-like object and provides a couple of | object. This field acts as a file-like object and provides a couple of | ||||||
| different ways of inserting and retrieving data. Arbitrary metadata such as | different ways of inserting and retrieving data. Arbitrary metadata such as | ||||||
| content type can also be stored alongside the files. In the following example, | content type can also be stored alongside the files. The object returned when accessing a | ||||||
| a document is created to store details about animals, including a photo:: | FileField is a proxy to `Pymongo's GridFS <https://api.mongodb.com/python/current/examples/gridfs.html#gridfs-example>`_ | ||||||
|  | In the following example, a document is created to store details about animals, including a photo:: | ||||||
|  |  | ||||||
|     class Animal(Document): |     class Animal(Document): | ||||||
|         genus = StringField() |         genus = StringField() | ||||||
| @@ -20,8 +21,8 @@ a document is created to store details about animals, including a photo:: | |||||||
|  |  | ||||||
|     marmot = Animal(genus='Marmota', family='Sciuridae') |     marmot = Animal(genus='Marmota', family='Sciuridae') | ||||||
|  |  | ||||||
|     marmot_photo = open('marmot.jpg', 'rb') |     with open('marmot.jpg', 'rb') as fd: | ||||||
|     marmot.photo.put(marmot_photo, content_type = 'image/jpeg') |         marmot.photo.put(fd, content_type = 'image/jpeg') | ||||||
|     marmot.save() |     marmot.save() | ||||||
|  |  | ||||||
| Retrieval | Retrieval | ||||||
| @@ -34,6 +35,20 @@ field. The file can also be retrieved just as easily:: | |||||||
|     photo = marmot.photo.read() |     photo = marmot.photo.read() | ||||||
|     content_type = marmot.photo.content_type |     content_type = marmot.photo.content_type | ||||||
|  |  | ||||||
|  | .. note:: If you need to read() the content of a file multiple times, you'll need to "rewind" | ||||||
|  |     the file-like object using `seek`:: | ||||||
|  |  | ||||||
|  |         marmot = Animal.objects(genus='Marmota').first() | ||||||
|  |         content1 = marmot.photo.read() | ||||||
|  |         assert content1 != "" | ||||||
|  |  | ||||||
|  |         content2 = marmot.photo.read()    # will be empty | ||||||
|  |         assert content2 == "" | ||||||
|  |  | ||||||
|  |         marmot.photo.seek(0)              # rewind the file by setting the current position of the cursor in the file to 0 | ||||||
|  |         content3 = marmot.photo.read() | ||||||
|  |         assert content3 == content1 | ||||||
|  |  | ||||||
| Streaming | Streaming | ||||||
| --------- | --------- | ||||||
|  |  | ||||||
|   | |||||||
| @@ -13,4 +13,5 @@ User Guide | |||||||
|    gridfs |    gridfs | ||||||
|    signals |    signals | ||||||
|    text-indexes |    text-indexes | ||||||
|  |    logging-monitoring | ||||||
|    mongomock |    mongomock | ||||||
|   | |||||||
| @@ -12,7 +12,7 @@ MongoEngine is available on PyPI, so you can use :program:`pip`: | |||||||
|  |  | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     $ pip install mongoengine |     $ python -m pip install mongoengine | ||||||
|  |  | ||||||
| Alternatively, if you don't have setuptools installed, `download it from PyPi | Alternatively, if you don't have setuptools installed, `download it from PyPi | ||||||
| <http://pypi.python.org/pypi/mongoengine/>`_ and run | <http://pypi.python.org/pypi/mongoengine/>`_ and run | ||||||
|   | |||||||
							
								
								
									
										80
									
								
								docs/guide/logging-monitoring.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										80
									
								
								docs/guide/logging-monitoring.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,80 @@ | |||||||
|  | ================== | ||||||
|  | Logging/Monitoring | ||||||
|  | ================== | ||||||
|  |  | ||||||
|  | It is possible to use `pymongo.monitoring <https://api.mongodb.com/python/current/api/pymongo/monitoring.html>`_ to monitor | ||||||
|  | the driver events (e.g: queries, connections, etc). This can be handy if you want to monitor the queries issued by | ||||||
|  | MongoEngine to the driver. | ||||||
|  |  | ||||||
|  | To use `pymongo.monitoring` with MongoEngine, you need to make sure that you are registering the listeners | ||||||
|  | **before** establishing the database connection (i.e calling `connect`): | ||||||
|  |  | ||||||
|  | The following snippet provides a basic logging of all command events: | ||||||
|  |  | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     import logging | ||||||
|  |     from pymongo import monitoring | ||||||
|  |     from mongoengine import * | ||||||
|  |  | ||||||
|  |     log = logging.getLogger() | ||||||
|  |     log.setLevel(logging.DEBUG) | ||||||
|  |     logging.basicConfig(level=logging.DEBUG) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     class CommandLogger(monitoring.CommandListener): | ||||||
|  |  | ||||||
|  |         def started(self, event): | ||||||
|  |             log.debug("Command {0.command_name} with request id " | ||||||
|  |                      "{0.request_id} started on server " | ||||||
|  |                      "{0.connection_id}".format(event)) | ||||||
|  |  | ||||||
|  |         def succeeded(self, event): | ||||||
|  |             log.debug("Command {0.command_name} with request id " | ||||||
|  |                      "{0.request_id} on server {0.connection_id} " | ||||||
|  |                      "succeeded in {0.duration_micros} " | ||||||
|  |                      "microseconds".format(event)) | ||||||
|  |  | ||||||
|  |         def failed(self, event): | ||||||
|  |             log.debug("Command {0.command_name} with request id " | ||||||
|  |                      "{0.request_id} on server {0.connection_id} " | ||||||
|  |                      "failed in {0.duration_micros} " | ||||||
|  |                      "microseconds".format(event)) | ||||||
|  |  | ||||||
|  |     monitoring.register(CommandLogger()) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     class Jedi(Document): | ||||||
|  |         name = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     connect() | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     log.info('GO!') | ||||||
|  |  | ||||||
|  |     log.info('Saving an item through MongoEngine...') | ||||||
|  |     Jedi(name='Obi-Wan Kenobii').save() | ||||||
|  |  | ||||||
|  |     log.info('Querying through MongoEngine...') | ||||||
|  |     obiwan = Jedi.objects.first() | ||||||
|  |  | ||||||
|  |     log.info('Updating through MongoEngine...') | ||||||
|  |     obiwan.name = 'Obi-Wan Kenobi' | ||||||
|  |     obiwan.save() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Executing this prints the following output:: | ||||||
|  |  | ||||||
|  |     INFO:root:GO! | ||||||
|  |     INFO:root:Saving an item through MongoEngine... | ||||||
|  |     DEBUG:root:Command insert with request id 1681692777 started on server ('localhost', 27017) | ||||||
|  |     DEBUG:root:Command insert with request id 1681692777 on server ('localhost', 27017) succeeded in 562 microseconds | ||||||
|  |     INFO:root:Querying through MongoEngine... | ||||||
|  |     DEBUG:root:Command find with request id 1714636915 started on server ('localhost', 27017) | ||||||
|  |     DEBUG:root:Command find with request id 1714636915 on server ('localhost', 27017) succeeded in 341 microseconds | ||||||
|  |     INFO:root:Updating through MongoEngine... | ||||||
|  |     DEBUG:root:Command update with request id 1957747793 started on server ('localhost', 27017) | ||||||
|  |     DEBUG:root:Command update with request id 1957747793 on server ('localhost', 27017) succeeded in 455 microseconds | ||||||
|  |  | ||||||
|  | More details can of course be obtained by checking the `event` argument from the `CommandListener`. | ||||||
| @@ -2,10 +2,10 @@ | |||||||
| Use mongomock for testing | Use mongomock for testing | ||||||
| ============================== | ============================== | ||||||
|  |  | ||||||
| `mongomock <https://github.com/vmalloc/mongomock/>`_ is a package to do just  | `mongomock <https://github.com/vmalloc/mongomock/>`_ is a package to do just | ||||||
| what the name implies, mocking a mongo database. | what the name implies, mocking a mongo database. | ||||||
|  |  | ||||||
| To use with mongoengine, simply specify mongomock when connecting with  | To use with mongoengine, simply specify mongomock when connecting with | ||||||
| mongoengine: | mongoengine: | ||||||
|  |  | ||||||
| .. code-block:: python | .. code-block:: python | ||||||
| @@ -21,7 +21,7 @@ or with an alias: | |||||||
|     conn = get_connection('testdb') |     conn = get_connection('testdb') | ||||||
|  |  | ||||||
| Example of test file: | Example of test file: | ||||||
| -------- | --------------------- | ||||||
| .. code-block:: python | .. code-block:: python | ||||||
|  |  | ||||||
|     import unittest |     import unittest | ||||||
| @@ -45,4 +45,4 @@ Example of test file: | |||||||
|             pers.save() |             pers.save() | ||||||
|  |  | ||||||
|             fresh_pers = Person.objects().first() |             fresh_pers = Person.objects().first() | ||||||
|             self.assertEqual(fresh_pers.name, 'John') |             assert fresh_pers.name ==  'John' | ||||||
|   | |||||||
| @@ -222,6 +222,18 @@ keyword argument:: | |||||||
|  |  | ||||||
| .. versionadded:: 0.4 | .. versionadded:: 0.4 | ||||||
|  |  | ||||||
|  | Sorting/Ordering results | ||||||
|  | ======================== | ||||||
|  | It is possible to order the results by 1 or more keys using :meth:`~mongoengine.queryset.QuerySet.order_by`. | ||||||
|  | The order may be specified by prepending each of the keys by "+" or "-". Ascending order is assumed if there's no prefix.:: | ||||||
|  |  | ||||||
|  |     # Order by ascending date | ||||||
|  |     blogs = BlogPost.objects().order_by('date')    # equivalent to .order_by('+date') | ||||||
|  |  | ||||||
|  |     # Order by ascending date first, then descending title | ||||||
|  |     blogs = BlogPost.objects().order_by('+date', '-title') | ||||||
|  |  | ||||||
|  |  | ||||||
| Limiting and skipping results | Limiting and skipping results | ||||||
| ============================= | ============================= | ||||||
| Just as with traditional ORMs, you may limit the number of results returned or | Just as with traditional ORMs, you may limit the number of results returned or | ||||||
| @@ -349,9 +361,9 @@ Just as with limiting and skipping results, there is a method on a | |||||||
| You could technically use ``len(User.objects)`` to get the same result, but it | You could technically use ``len(User.objects)`` to get the same result, but it | ||||||
| would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`. | would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`. | ||||||
| When you execute a server-side count query, you let MongoDB do the heavy | When you execute a server-side count query, you let MongoDB do the heavy | ||||||
| lifting and you receive a single integer over the wire. Meanwhile, len() | lifting and you receive a single integer over the wire. Meanwhile, ``len()`` | ||||||
| retrieves all the results, places them in a local cache, and finally counts | retrieves all the results, places them in a local cache, and finally counts | ||||||
| them. If we compare the performance of the two operations, len() is much slower | them. If we compare the performance of the two operations, ``len()`` is much slower | ||||||
| than :meth:`~mongoengine.queryset.QuerySet.count`. | than :meth:`~mongoengine.queryset.QuerySet.count`. | ||||||
|  |  | ||||||
| Further aggregation | Further aggregation | ||||||
| @@ -386,6 +398,25 @@ would be generating "tag-clouds":: | |||||||
|     top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] |     top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | MongoDB aggregation API | ||||||
|  | ----------------------- | ||||||
|  | If you need to run aggregation pipelines, MongoEngine provides an entry point to `Pymongo's aggregation framework <https://api.mongodb.com/python/current/examples/aggregation.html#aggregation-framework>`_ | ||||||
|  | through :meth:`~mongoengine.queryset.QuerySet.aggregate`. Check out Pymongo's documentation for the syntax and pipeline. | ||||||
|  | An example of its use would be:: | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         Person(name='John').save() | ||||||
|  |         Person(name='Bob').save() | ||||||
|  |  | ||||||
|  |         pipeline = [ | ||||||
|  |             {"$sort" : {"name" : -1}}, | ||||||
|  |             {"$project": {"_id": 0, "name": {"$toUpper": "$name"}}} | ||||||
|  |             ] | ||||||
|  |         data = Person.objects().aggregate(pipeline) | ||||||
|  |         assert data == [{'name': 'BOB'}, {'name': 'JOHN'}] | ||||||
|  |  | ||||||
| Query efficiency and performance | Query efficiency and performance | ||||||
| ================================ | ================================ | ||||||
|  |  | ||||||
| @@ -566,7 +597,8 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: | |||||||
|     ['database', 'mongodb'] |     ['database', 'mongodb'] | ||||||
|  |  | ||||||
| From MongoDB version 2.6, push operator supports $position value which allows | From MongoDB version 2.6, push operator supports $position value which allows | ||||||
| to push values with index. | to push values with index:: | ||||||
|  |  | ||||||
|     >>> post = BlogPost(title="Test", tags=["mongo"]) |     >>> post = BlogPost(title="Test", tags=["mongo"]) | ||||||
|     >>> post.save() |     >>> post.save() | ||||||
|     >>> post.update(push__tags__0=["database", "code"]) |     >>> post.update(push__tags__0=["database", "code"]) | ||||||
|   | |||||||
| @@ -44,8 +44,8 @@ Available signals include: | |||||||
|  |  | ||||||
| `post_save` | `post_save` | ||||||
|   Called within :meth:`~mongoengine.Document.save` after most actions |   Called within :meth:`~mongoengine.Document.save` after most actions | ||||||
|   (validation, insert/update, and cascades, but not clearing dirty flags) have  |   (validation, insert/update, and cascades, but not clearing dirty flags) have | ||||||
|   completed successfully.  Passed the additional boolean keyword argument  |   completed successfully.  Passed the additional boolean keyword argument | ||||||
|   `created` to indicate if the save was an insert or an update. |   `created` to indicate if the save was an insert or an update. | ||||||
|  |  | ||||||
| `pre_delete` | `pre_delete` | ||||||
|   | |||||||
| @@ -8,7 +8,7 @@ After MongoDB 2.4 version, supports search documents by text indexes. | |||||||
| Defining a Document with text index | Defining a Document with text index | ||||||
| =================================== | =================================== | ||||||
| Use the *$* prefix to set a text index, Look the declaration:: | Use the *$* prefix to set a text index, Look the declaration:: | ||||||
|    |  | ||||||
|   class News(Document): |   class News(Document): | ||||||
|       title = StringField() |       title = StringField() | ||||||
|       content = StringField() |       content = StringField() | ||||||
| @@ -35,10 +35,10 @@ Saving a document:: | |||||||
|        content="Various improvements").save() |        content="Various improvements").save() | ||||||
|  |  | ||||||
| Next, start a text search using :attr:`QuerySet.search_text` method:: | Next, start a text search using :attr:`QuerySet.search_text` method:: | ||||||
|    |  | ||||||
|   document = News.objects.search_text('testing').first() |   document = News.objects.search_text('testing').first() | ||||||
|   document.title # may be: "Using mongodb text search" |   document.title # may be: "Using mongodb text search" | ||||||
|    |  | ||||||
|   document = News.objects.search_text('released').first() |   document = News.objects.search_text('released').first() | ||||||
|   document.title # may be: "MongoEngine 0.9 released" |   document.title # may be: "MongoEngine 0.9 released" | ||||||
|  |  | ||||||
|   | |||||||
| @@ -7,7 +7,7 @@ MongoDB. To install it, simply run | |||||||
|  |  | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     $ pip install -U mongoengine |     $ python -m pip install -U mongoengine | ||||||
|  |  | ||||||
| :doc:`tutorial` | :doc:`tutorial` | ||||||
|   A quick tutorial building a tumblelog to get you up and running with |   A quick tutorial building a tumblelog to get you up and running with | ||||||
| @@ -23,9 +23,18 @@ MongoDB. To install it, simply run | |||||||
| :doc:`upgrade` | :doc:`upgrade` | ||||||
|   How to upgrade MongoEngine. |   How to upgrade MongoEngine. | ||||||
|  |  | ||||||
|  | :doc:`faq` | ||||||
|  |   Frequently Asked Questions | ||||||
|  |  | ||||||
| :doc:`django` | :doc:`django` | ||||||
|   Using MongoEngine and Django |   Using MongoEngine and Django | ||||||
|  |  | ||||||
|  | MongoDB and driver support | ||||||
|  | -------------------------- | ||||||
|  |  | ||||||
|  | MongoEngine is based on the PyMongo driver and tested against multiple versions of MongoDB. | ||||||
|  | For further details, please refer to the `readme <https://github.com/MongoEngine/mongoengine#mongoengine>`_. | ||||||
|  |  | ||||||
| Community | Community | ||||||
| --------- | --------- | ||||||
|  |  | ||||||
| @@ -73,6 +82,7 @@ formats for offline reading. | |||||||
|     apireference |     apireference | ||||||
|     changelog |     changelog | ||||||
|     upgrade |     upgrade | ||||||
|  |     faq | ||||||
|     django |     django | ||||||
|  |  | ||||||
| Indices and tables | Indices and tables | ||||||
| @@ -81,4 +91,3 @@ Indices and tables | |||||||
| * :ref:`genindex` | * :ref:`genindex` | ||||||
| * :ref:`modindex` | * :ref:`modindex` | ||||||
| * :ref:`search` | * :ref:`search` | ||||||
|  |  | ||||||
|   | |||||||
| @@ -18,7 +18,7 @@ location --- running it locally will be easier, but if that is not an option | |||||||
| then it may be run on a remote server. If you haven't installed MongoEngine, | then it may be run on a remote server. If you haven't installed MongoEngine, | ||||||
| simply use pip to install it like so:: | simply use pip to install it like so:: | ||||||
|  |  | ||||||
|     $ pip install mongoengine |     $ python -m pip install mongoengine | ||||||
|  |  | ||||||
| Before we can start using MongoEngine, we need to tell it how to connect to our | Before we can start using MongoEngine, we need to tell it how to connect to our | ||||||
| instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect` | instance of :program:`mongod`. For this we use the :func:`~mongoengine.connect` | ||||||
|   | |||||||
| @@ -52,7 +52,7 @@ rename its occurrences. | |||||||
| This release includes a major rehaul of MongoEngine's code quality and | This release includes a major rehaul of MongoEngine's code quality and | ||||||
| introduces a few breaking changes. It also touches many different parts of | introduces a few breaking changes. It also touches many different parts of | ||||||
| the package and although all the changes have been tested and scrutinized, | the package and although all the changes have been tested and scrutinized, | ||||||
| you're encouraged to thorougly test the upgrade. | you're encouraged to thoroughly test the upgrade. | ||||||
|  |  | ||||||
| First breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`. | First breaking change involves renaming `ConnectionError` to `MongoEngineConnectionError`. | ||||||
| If you import or catch this exception, you'll need to rename it in your code. | If you import or catch this exception, you'll need to rename it in your code. | ||||||
| @@ -85,10 +85,10 @@ by default from now on. | |||||||
|  |  | ||||||
| The 0.8.7 package on pypi was corrupted.  If upgrading from 0.8.7 to 0.9.0 please follow: :: | The 0.8.7 package on pypi was corrupted.  If upgrading from 0.8.7 to 0.9.0 please follow: :: | ||||||
|  |  | ||||||
|     pip uninstall pymongo |     python -m pip uninstall pymongo | ||||||
|     pip uninstall mongoengine |     python -m pip uninstall mongoengine | ||||||
|     pip install pymongo==2.8 |     python -m pip install pymongo==2.8 | ||||||
|     pip install mongoengine |     python -m pip install mongoengine | ||||||
|  |  | ||||||
| 0.8.7 | 0.8.7 | ||||||
| ***** | ***** | ||||||
| @@ -153,7 +153,7 @@ inherited classes like so: :: | |||||||
|  |  | ||||||
|     # 4. Remove indexes |     # 4. Remove indexes | ||||||
|     info = collection.index_information() |     info = collection.index_information() | ||||||
|     indexes_to_drop = [key for key, value in info.iteritems() |     indexes_to_drop = [key for key, value in info.items() | ||||||
|                        if '_types' in dict(value['key'])] |                        if '_types' in dict(value['key'])] | ||||||
|     for index in indexes_to_drop: |     for index in indexes_to_drop: | ||||||
|         collection.drop_index(index) |         collection.drop_index(index) | ||||||
|   | |||||||
| @@ -18,12 +18,17 @@ from mongoengine.queryset import * | |||||||
| from mongoengine.signals import * | from mongoengine.signals import * | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = (list(document.__all__) + list(fields.__all__) + | __all__ = ( | ||||||
|            list(connection.__all__) + list(queryset.__all__) + |     list(document.__all__) | ||||||
|            list(signals.__all__) + list(errors.__all__)) |     + list(fields.__all__) | ||||||
|  |     + list(connection.__all__) | ||||||
|  |     + list(queryset.__all__) | ||||||
|  |     + list(signals.__all__) | ||||||
|  |     + list(errors.__all__) | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
| VERSION = (0, 18, 0) | VERSION = (0, 20, 0) | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_version(): | def get_version(): | ||||||
| @@ -31,7 +36,7 @@ def get_version(): | |||||||
|  |  | ||||||
|     For example, if `VERSION == (0, 10, 7)`, return '0.10.7'. |     For example, if `VERSION == (0, 10, 7)`, return '0.10.7'. | ||||||
|     """ |     """ | ||||||
|     return '.'.join(map(str, VERSION)) |     return ".".join(map(str, VERSION)) | ||||||
|  |  | ||||||
|  |  | ||||||
| __version__ = get_version() | __version__ = get_version() | ||||||
|   | |||||||
| @@ -12,17 +12,22 @@ from mongoengine.base.metaclasses import * | |||||||
|  |  | ||||||
| __all__ = ( | __all__ = ( | ||||||
|     # common |     # common | ||||||
|     'UPDATE_OPERATORS', '_document_registry', 'get_document', |     "UPDATE_OPERATORS", | ||||||
|  |     "_document_registry", | ||||||
|  |     "get_document", | ||||||
|     # datastructures |     # datastructures | ||||||
|     'BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference', |     "BaseDict", | ||||||
|  |     "BaseList", | ||||||
|  |     "EmbeddedDocumentList", | ||||||
|  |     "LazyReference", | ||||||
|     # document |     # document | ||||||
|     'BaseDocument', |     "BaseDocument", | ||||||
|  |  | ||||||
|     # fields |     # fields | ||||||
|     'BaseField', 'ComplexBaseField', 'ObjectIdField', 'GeoJsonBaseField', |     "BaseField", | ||||||
|  |     "ComplexBaseField", | ||||||
|  |     "ObjectIdField", | ||||||
|  |     "GeoJsonBaseField", | ||||||
|     # metaclasses |     # metaclasses | ||||||
|     'DocumentMetaclass', 'TopLevelDocumentMetaclass' |     "DocumentMetaclass", | ||||||
|  |     "TopLevelDocumentMetaclass", | ||||||
| ) | ) | ||||||
|   | |||||||
| @@ -1,12 +1,25 @@ | |||||||
| from mongoengine.errors import NotRegistered | from mongoengine.errors import NotRegistered | ||||||
|  |  | ||||||
| __all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry') | __all__ = ("UPDATE_OPERATORS", "get_document", "_document_registry") | ||||||
|  |  | ||||||
|  |  | ||||||
| UPDATE_OPERATORS = {'set', 'unset', 'inc', 'dec', 'mul', | UPDATE_OPERATORS = { | ||||||
|                     'pop', 'push', 'push_all', 'pull', |     "set", | ||||||
|                     'pull_all', 'add_to_set', 'set_on_insert', |     "unset", | ||||||
|                     'min', 'max', 'rename'} |     "inc", | ||||||
|  |     "dec", | ||||||
|  |     "mul", | ||||||
|  |     "pop", | ||||||
|  |     "push", | ||||||
|  |     "push_all", | ||||||
|  |     "pull", | ||||||
|  |     "pull_all", | ||||||
|  |     "add_to_set", | ||||||
|  |     "set_on_insert", | ||||||
|  |     "min", | ||||||
|  |     "max", | ||||||
|  |     "rename", | ||||||
|  | } | ||||||
|  |  | ||||||
|  |  | ||||||
| _document_registry = {} | _document_registry = {} | ||||||
| @@ -17,25 +30,33 @@ def get_document(name): | |||||||
|     doc = _document_registry.get(name, None) |     doc = _document_registry.get(name, None) | ||||||
|     if not doc: |     if not doc: | ||||||
|         # Possible old style name |         # Possible old style name | ||||||
|         single_end = name.split('.')[-1] |         single_end = name.split(".")[-1] | ||||||
|         compound_end = '.%s' % single_end |         compound_end = ".%s" % single_end | ||||||
|         possible_match = [k for k in _document_registry |         possible_match = [ | ||||||
|                           if k.endswith(compound_end) or k == single_end] |             k for k in _document_registry if k.endswith(compound_end) or k == single_end | ||||||
|  |         ] | ||||||
|         if len(possible_match) == 1: |         if len(possible_match) == 1: | ||||||
|             doc = _document_registry.get(possible_match.pop(), None) |             doc = _document_registry.get(possible_match.pop(), None) | ||||||
|     if not doc: |     if not doc: | ||||||
|         raise NotRegistered(""" |         raise NotRegistered( | ||||||
|  |             """ | ||||||
|             `%s` has not been registered in the document registry. |             `%s` has not been registered in the document registry. | ||||||
|             Importing the document class automatically registers it, has it |             Importing the document class automatically registers it, has it | ||||||
|             been imported? |             been imported? | ||||||
|         """.strip() % name) |         """.strip() | ||||||
|  |             % name | ||||||
|  |         ) | ||||||
|     return doc |     return doc | ||||||
|  |  | ||||||
|  |  | ||||||
| def _get_documents_by_db(connection_alias, default_connection_alias): | def _get_documents_by_db(connection_alias, default_connection_alias): | ||||||
|     """Get all registered Documents class attached to a given database""" |     """Get all registered Documents class attached to a given database""" | ||||||
|     def get_doc_alias(doc_cls): |  | ||||||
|         return doc_cls._meta.get('db_alias', default_connection_alias) |  | ||||||
|  |  | ||||||
|     return [doc_cls for doc_cls in _document_registry.values() |     def get_doc_alias(doc_cls): | ||||||
|             if get_doc_alias(doc_cls) == connection_alias] |         return doc_cls._meta.get("db_alias", default_connection_alias) | ||||||
|  |  | ||||||
|  |     return [ | ||||||
|  |         doc_cls | ||||||
|  |         for doc_cls in _document_registry.values() | ||||||
|  |         if get_doc_alias(doc_cls) == connection_alias | ||||||
|  |     ] | ||||||
|   | |||||||
| @@ -1,30 +1,40 @@ | |||||||
| import weakref | import weakref | ||||||
|  |  | ||||||
| from bson import DBRef | from bson import DBRef | ||||||
| import six |  | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.errors import DoesNotExist, MultipleObjectsReturned | from mongoengine.errors import DoesNotExist, MultipleObjectsReturned | ||||||
|  |  | ||||||
| __all__ = ('BaseDict', 'StrictDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference') | __all__ = ( | ||||||
|  |     "BaseDict", | ||||||
|  |     "StrictDict", | ||||||
|  |     "BaseList", | ||||||
|  |     "EmbeddedDocumentList", | ||||||
|  |     "LazyReference", | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
| def mark_as_changed_wrapper(parent_method): | def mark_as_changed_wrapper(parent_method): | ||||||
|     """Decorators that ensures _mark_as_changed method gets called""" |     """Decorator that ensures _mark_as_changed method gets called.""" | ||||||
|  |  | ||||||
|     def wrapper(self, *args, **kwargs): |     def wrapper(self, *args, **kwargs): | ||||||
|         result = parent_method(self, *args, **kwargs)   # Can't use super() in the decorator |         # Can't use super() in the decorator. | ||||||
|  |         result = parent_method(self, *args, **kwargs) | ||||||
|         self._mark_as_changed() |         self._mark_as_changed() | ||||||
|         return result |         return result | ||||||
|  |  | ||||||
|     return wrapper |     return wrapper | ||||||
|  |  | ||||||
|  |  | ||||||
| def mark_key_as_changed_wrapper(parent_method): | def mark_key_as_changed_wrapper(parent_method): | ||||||
|     """Decorators that ensures _mark_as_changed method gets called with the key argument""" |     """Decorator that ensures _mark_as_changed method gets called with the key argument""" | ||||||
|  |  | ||||||
|     def wrapper(self, key, *args, **kwargs): |     def wrapper(self, key, *args, **kwargs): | ||||||
|         result = parent_method(self, key, *args, **kwargs)   # Can't use super() in the decorator |         # Can't use super() in the decorator. | ||||||
|  |         result = parent_method(self, key, *args, **kwargs) | ||||||
|         self._mark_as_changed(key) |         self._mark_as_changed(key) | ||||||
|         return result |         return result | ||||||
|  |  | ||||||
|     return wrapper |     return wrapper | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -36,12 +46,12 @@ class BaseDict(dict): | |||||||
|     _name = None |     _name = None | ||||||
|  |  | ||||||
|     def __init__(self, dict_items, instance, name): |     def __init__(self, dict_items, instance, name): | ||||||
|         BaseDocument = _import_class('BaseDocument') |         BaseDocument = _import_class("BaseDocument") | ||||||
|  |  | ||||||
|         if isinstance(instance, BaseDocument): |         if isinstance(instance, BaseDocument): | ||||||
|             self._instance = weakref.proxy(instance) |             self._instance = weakref.proxy(instance) | ||||||
|         self._name = name |         self._name = name | ||||||
|         super(BaseDict, self).__init__(dict_items) |         super().__init__(dict_items) | ||||||
|  |  | ||||||
|     def get(self, key, default=None): |     def get(self, key, default=None): | ||||||
|         # get does not use __getitem__ by default so we must override it as well |         # get does not use __getitem__ by default so we must override it as well | ||||||
| @@ -51,18 +61,18 @@ class BaseDict(dict): | |||||||
|             return default |             return default | ||||||
|  |  | ||||||
|     def __getitem__(self, key): |     def __getitem__(self, key): | ||||||
|         value = super(BaseDict, self).__getitem__(key) |         value = super().__getitem__(key) | ||||||
|  |  | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: |         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif isinstance(value, dict) and not isinstance(value, BaseDict): |         elif isinstance(value, dict) and not isinstance(value, BaseDict): | ||||||
|             value = BaseDict(value, None, '%s.%s' % (self._name, key)) |             value = BaseDict(value, None, "{}.{}".format(self._name, key)) | ||||||
|             super(BaseDict, self).__setitem__(key, value) |             super().__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif isinstance(value, list) and not isinstance(value, BaseList): |         elif isinstance(value, list) and not isinstance(value, BaseList): | ||||||
|             value = BaseList(value, None, '%s.%s' % (self._name, key)) |             value = BaseList(value, None, "{}.{}".format(self._name, key)) | ||||||
|             super(BaseDict, self).__setitem__(key, value) |             super().__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
| @@ -85,9 +95,9 @@ class BaseDict(dict): | |||||||
|     setdefault = mark_as_changed_wrapper(dict.setdefault) |     setdefault = mark_as_changed_wrapper(dict.setdefault) | ||||||
|  |  | ||||||
|     def _mark_as_changed(self, key=None): |     def _mark_as_changed(self, key=None): | ||||||
|         if hasattr(self._instance, '_mark_as_changed'): |         if hasattr(self._instance, "_mark_as_changed"): | ||||||
|             if key: |             if key: | ||||||
|                 self._instance._mark_as_changed('%s.%s' % (self._name, key)) |                 self._instance._mark_as_changed("{}.{}".format(self._name, key)) | ||||||
|             else: |             else: | ||||||
|                 self._instance._mark_as_changed(self._name) |                 self._instance._mark_as_changed(self._name) | ||||||
|  |  | ||||||
| @@ -100,39 +110,41 @@ class BaseList(list): | |||||||
|     _name = None |     _name = None | ||||||
|  |  | ||||||
|     def __init__(self, list_items, instance, name): |     def __init__(self, list_items, instance, name): | ||||||
|         BaseDocument = _import_class('BaseDocument') |         BaseDocument = _import_class("BaseDocument") | ||||||
|  |  | ||||||
|         if isinstance(instance, BaseDocument): |         if isinstance(instance, BaseDocument): | ||||||
|             self._instance = weakref.proxy(instance) |             self._instance = weakref.proxy(instance) | ||||||
|         self._name = name |         self._name = name | ||||||
|         super(BaseList, self).__init__(list_items) |         super().__init__(list_items) | ||||||
|  |  | ||||||
|     def __getitem__(self, key): |     def __getitem__(self, key): | ||||||
|         value = super(BaseList, self).__getitem__(key) |         # change index to positive value because MongoDB does not support negative one | ||||||
|  |         if isinstance(key, int) and key < 0: | ||||||
|  |             key = len(self) + key | ||||||
|  |         value = super().__getitem__(key) | ||||||
|  |  | ||||||
|         if isinstance(key, slice): |         if isinstance(key, slice): | ||||||
|             # When receiving a slice operator, we don't convert the structure and bind |             # When receiving a slice operator, we don't convert the structure and bind | ||||||
|             # to parent's instance. This is buggy for now but would require more work to be handled properly |             # to parent's instance. This is buggy for now but would require more work to be handled properly | ||||||
|             return value |             return value | ||||||
|  |  | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: |         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif isinstance(value, dict) and not isinstance(value, BaseDict): |         elif isinstance(value, dict) and not isinstance(value, BaseDict): | ||||||
|             # Replace dict by BaseDict |             # Replace dict by BaseDict | ||||||
|             value = BaseDict(value, None, '%s.%s' % (self._name, key)) |             value = BaseDict(value, None, "{}.{}".format(self._name, key)) | ||||||
|             super(BaseList, self).__setitem__(key, value) |             super().__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif isinstance(value, list) and not isinstance(value, BaseList): |         elif isinstance(value, list) and not isinstance(value, BaseList): | ||||||
|             # Replace list by BaseList |             # Replace list by BaseList | ||||||
|             value = BaseList(value, None, '%s.%s' % (self._name, key)) |             value = BaseList(value, None, "{}.{}".format(self._name, key)) | ||||||
|             super(BaseList, self).__setitem__(key, value) |             super().__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def __iter__(self): |     def __iter__(self): | ||||||
|         for v in super(BaseList, self).__iter__(): |         yield from super().__iter__() | ||||||
|             yield v |  | ||||||
|  |  | ||||||
|     def __getstate__(self): |     def __getstate__(self): | ||||||
|         self.instance = None |         self.instance = None | ||||||
| @@ -150,7 +162,7 @@ class BaseList(list): | |||||||
|             # instead, we simply marks the whole list as changed |             # instead, we simply marks the whole list as changed | ||||||
|             changed_key = None |             changed_key = None | ||||||
|  |  | ||||||
|         result = super(BaseList, self).__setitem__(key, value) |         result = super().__setitem__(key, value) | ||||||
|         self._mark_as_changed(changed_key) |         self._mark_as_changed(changed_key) | ||||||
|         return result |         return result | ||||||
|  |  | ||||||
| @@ -165,33 +177,19 @@ class BaseList(list): | |||||||
|     __iadd__ = mark_as_changed_wrapper(list.__iadd__) |     __iadd__ = mark_as_changed_wrapper(list.__iadd__) | ||||||
|     __imul__ = mark_as_changed_wrapper(list.__imul__) |     __imul__ = mark_as_changed_wrapper(list.__imul__) | ||||||
|  |  | ||||||
|     if six.PY2: |  | ||||||
|         # Under py3 __setslice__, __delslice__ and __getslice__ |  | ||||||
|         # are replaced by __setitem__, __delitem__ and __getitem__ with a slice as parameter |  | ||||||
|         # so we mimic this under python 2 |  | ||||||
|         def __setslice__(self, i, j, sequence): |  | ||||||
|             return self.__setitem__(slice(i, j), sequence) |  | ||||||
|  |  | ||||||
|         def __delslice__(self, i, j): |  | ||||||
|             return self.__delitem__(slice(i, j)) |  | ||||||
|  |  | ||||||
|         def __getslice__(self, i, j): |  | ||||||
|             return self.__getitem__(slice(i, j)) |  | ||||||
|  |  | ||||||
|     def _mark_as_changed(self, key=None): |     def _mark_as_changed(self, key=None): | ||||||
|         if hasattr(self._instance, '_mark_as_changed'): |         if hasattr(self._instance, "_mark_as_changed"): | ||||||
|             if key: |             if key: | ||||||
|                 self._instance._mark_as_changed( |                 self._instance._mark_as_changed( | ||||||
|                     '%s.%s' % (self._name, key % len(self)) |                     "{}.{}".format(self._name, key % len(self)) | ||||||
|                 ) |                 ) | ||||||
|             else: |             else: | ||||||
|                 self._instance._mark_as_changed(self._name) |                 self._instance._mark_as_changed(self._name) | ||||||
|  |  | ||||||
|  |  | ||||||
| class EmbeddedDocumentList(BaseList): | class EmbeddedDocumentList(BaseList): | ||||||
|  |  | ||||||
|     def __init__(self, list_items, instance, name): |     def __init__(self, list_items, instance, name): | ||||||
|         super(EmbeddedDocumentList, self).__init__(list_items, instance, name) |         super().__init__(list_items, instance, name) | ||||||
|         self._instance = instance |         self._instance = instance | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
| @@ -201,7 +199,7 @@ class EmbeddedDocumentList(BaseList): | |||||||
|         """ |         """ | ||||||
|         for key, expected_value in kwargs.items(): |         for key, expected_value in kwargs.items(): | ||||||
|             doc_val = getattr(embedded_doc, key) |             doc_val = getattr(embedded_doc, key) | ||||||
|             if doc_val != expected_value and six.text_type(doc_val) != expected_value: |             if doc_val != expected_value and str(doc_val) != expected_value: | ||||||
|                 return False |                 return False | ||||||
|         return True |         return True | ||||||
|  |  | ||||||
| @@ -274,12 +272,10 @@ class EmbeddedDocumentList(BaseList): | |||||||
|         """ |         """ | ||||||
|         values = self.__only_matches(self, kwargs) |         values = self.__only_matches(self, kwargs) | ||||||
|         if len(values) == 0: |         if len(values) == 0: | ||||||
|             raise DoesNotExist( |             raise DoesNotExist("%s matching query does not exist." % self._name) | ||||||
|                 '%s matching query does not exist.' % self._name |  | ||||||
|             ) |  | ||||||
|         elif len(values) > 1: |         elif len(values) > 1: | ||||||
|             raise MultipleObjectsReturned( |             raise MultipleObjectsReturned( | ||||||
|                 '%d items returned, instead of 1' % len(values) |                 "%d items returned, instead of 1" % len(values) | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         return values[0] |         return values[0] | ||||||
| @@ -293,11 +289,11 @@ class EmbeddedDocumentList(BaseList): | |||||||
|  |  | ||||||
|     def create(self, **values): |     def create(self, **values): | ||||||
|         """ |         """ | ||||||
|         Creates a new embedded document and saves it to the database. |         Creates a new instance of the EmbeddedDocument and appends it to this EmbeddedDocumentList. | ||||||
|  |  | ||||||
|         .. note:: |         .. note:: | ||||||
|             The embedded document changes are not automatically saved |             the instance of the EmbeddedDocument is not automatically saved to the database. | ||||||
|             to the database after calling this method. |             You still need to call .save() on the parent Document. | ||||||
|  |  | ||||||
|         :param values: A dictionary of values for the embedded document. |         :param values: A dictionary of values for the embedded document. | ||||||
|         :return: The new embedded document instance. |         :return: The new embedded document instance. | ||||||
| @@ -358,24 +354,24 @@ class EmbeddedDocumentList(BaseList): | |||||||
|         return len(values) |         return len(values) | ||||||
|  |  | ||||||
|  |  | ||||||
| class StrictDict(object): | class StrictDict: | ||||||
|     __slots__ = () |     __slots__ = () | ||||||
|     _special_fields = {'get', 'pop', 'iteritems', 'items', 'keys', 'create'} |     _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"} | ||||||
|     _classes = {} |     _classes = {} | ||||||
|  |  | ||||||
|     def __init__(self, **kwargs): |     def __init__(self, **kwargs): | ||||||
|         for k, v in iteritems(kwargs): |         for k, v in kwargs.items(): | ||||||
|             setattr(self, k, v) |             setattr(self, k, v) | ||||||
|  |  | ||||||
|     def __getitem__(self, key): |     def __getitem__(self, key): | ||||||
|         key = '_reserved_' + key if key in self._special_fields else key |         key = "_reserved_" + key if key in self._special_fields else key | ||||||
|         try: |         try: | ||||||
|             return getattr(self, key) |             return getattr(self, key) | ||||||
|         except AttributeError: |         except AttributeError: | ||||||
|             raise KeyError(key) |             raise KeyError(key) | ||||||
|  |  | ||||||
|     def __setitem__(self, key, value): |     def __setitem__(self, key, value): | ||||||
|         key = '_reserved_' + key if key in self._special_fields else key |         key = "_reserved_" + key if key in self._special_fields else key | ||||||
|         return setattr(self, key, value) |         return setattr(self, key, value) | ||||||
|  |  | ||||||
|     def __contains__(self, key): |     def __contains__(self, key): | ||||||
| @@ -412,37 +408,42 @@ class StrictDict(object): | |||||||
|         return (key for key in self.__slots__ if hasattr(self, key)) |         return (key for key in self.__slots__ if hasattr(self, key)) | ||||||
|  |  | ||||||
|     def __len__(self): |     def __len__(self): | ||||||
|         return len(list(iteritems(self))) |         return len(list(self.items())) | ||||||
|  |  | ||||||
|     def __eq__(self, other): |     def __eq__(self, other): | ||||||
|         return self.items() == other.items() |         return list(self.items()) == list(other.items()) | ||||||
|  |  | ||||||
|     def __ne__(self, other): |     def __ne__(self, other): | ||||||
|         return self.items() != other.items() |         return not (self == other) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def create(cls, allowed_keys): |     def create(cls, allowed_keys): | ||||||
|         allowed_keys_tuple = tuple(('_reserved_' + k if k in cls._special_fields else k) for k in allowed_keys) |         allowed_keys_tuple = tuple( | ||||||
|  |             ("_reserved_" + k if k in cls._special_fields else k) for k in allowed_keys | ||||||
|  |         ) | ||||||
|         allowed_keys = frozenset(allowed_keys_tuple) |         allowed_keys = frozenset(allowed_keys_tuple) | ||||||
|         if allowed_keys not in cls._classes: |         if allowed_keys not in cls._classes: | ||||||
|  |  | ||||||
|             class SpecificStrictDict(cls): |             class SpecificStrictDict(cls): | ||||||
|                 __slots__ = allowed_keys_tuple |                 __slots__ = allowed_keys_tuple | ||||||
|  |  | ||||||
|                 def __repr__(self): |                 def __repr__(self): | ||||||
|                     return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items()) |                     return "{%s}" % ", ".join( | ||||||
|  |                         '"{!s}": {!r}'.format(k, v) for k, v in self.items() | ||||||
|  |                     ) | ||||||
|  |  | ||||||
|             cls._classes[allowed_keys] = SpecificStrictDict |             cls._classes[allowed_keys] = SpecificStrictDict | ||||||
|         return cls._classes[allowed_keys] |         return cls._classes[allowed_keys] | ||||||
|  |  | ||||||
|  |  | ||||||
| class LazyReference(DBRef): | class LazyReference(DBRef): | ||||||
|     __slots__ = ('_cached_doc', 'passthrough', 'document_type') |     __slots__ = ("_cached_doc", "passthrough", "document_type") | ||||||
|  |  | ||||||
|     def fetch(self, force=False): |     def fetch(self, force=False): | ||||||
|         if not self._cached_doc or force: |         if not self._cached_doc or force: | ||||||
|             self._cached_doc = self.document_type.objects.get(pk=self.pk) |             self._cached_doc = self.document_type.objects.get(pk=self.pk) | ||||||
|             if not self._cached_doc: |             if not self._cached_doc: | ||||||
|                 raise DoesNotExist('Trying to dereference unknown document %s' % (self)) |                 raise DoesNotExist("Trying to dereference unknown document %s" % (self)) | ||||||
|         return self._cached_doc |         return self._cached_doc | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
| @@ -453,7 +454,7 @@ class LazyReference(DBRef): | |||||||
|         self.document_type = document_type |         self.document_type = document_type | ||||||
|         self._cached_doc = cached_doc |         self._cached_doc = cached_doc | ||||||
|         self.passthrough = passthrough |         self.passthrough = passthrough | ||||||
|         super(LazyReference, self).__init__(self.document_type._get_collection_name(), pk) |         super().__init__(self.document_type._get_collection_name(), pk) | ||||||
|  |  | ||||||
|     def __getitem__(self, name): |     def __getitem__(self, name): | ||||||
|         if not self.passthrough: |         if not self.passthrough: | ||||||
| @@ -462,7 +463,7 @@ class LazyReference(DBRef): | |||||||
|         return document[name] |         return document[name] | ||||||
|  |  | ||||||
|     def __getattr__(self, name): |     def __getattr__(self, name): | ||||||
|         if not object.__getattribute__(self, 'passthrough'): |         if not object.__getattribute__(self, "passthrough"): | ||||||
|             raise AttributeError() |             raise AttributeError() | ||||||
|         document = self.fetch() |         document = self.fetch() | ||||||
|         try: |         try: | ||||||
| @@ -471,4 +472,4 @@ class LazyReference(DBRef): | |||||||
|             raise AttributeError() |             raise AttributeError() | ||||||
|  |  | ||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
|         return "<LazyReference(%s, %r)>" % (self.document_type, self.pk) |         return "<LazyReference({}, {!r})>".format(self.document_type, self.pk) | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -4,25 +4,22 @@ import weakref | |||||||
|  |  | ||||||
| from bson import DBRef, ObjectId, SON | from bson import DBRef, ObjectId, SON | ||||||
| import pymongo | import pymongo | ||||||
| import six |  | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine.base.common import UPDATE_OPERATORS | from mongoengine.base.common import UPDATE_OPERATORS | ||||||
| from mongoengine.base.datastructures import (BaseDict, BaseList, | from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList | ||||||
|                                              EmbeddedDocumentList) |  | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.errors import DeprecatedError, ValidationError | from mongoengine.errors import DeprecatedError, ValidationError | ||||||
|  |  | ||||||
| __all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField', | __all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField") | ||||||
|            'GeoJsonBaseField') |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseField(object): | class BaseField: | ||||||
|     """A base class for fields in a MongoDB document. Instances of this class |     """A base class for fields in a MongoDB document. Instances of this class | ||||||
|     may be added to subclasses of `Document` to define a document's schema. |     may be added to subclasses of `Document` to define a document's schema. | ||||||
|  |  | ||||||
|     .. versionchanged:: 0.5 - added verbose and help text |     .. versionchanged:: 0.5 - added verbose and help text | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     name = None |     name = None | ||||||
|     _geo_index = False |     _geo_index = False | ||||||
|     _auto_gen = False  # Call `generate` to generate a value |     _auto_gen = False  # Call `generate` to generate a value | ||||||
| @@ -34,14 +31,23 @@ class BaseField(object): | |||||||
|     creation_counter = 0 |     creation_counter = 0 | ||||||
|     auto_creation_counter = -1 |     auto_creation_counter = -1 | ||||||
|  |  | ||||||
|     def __init__(self, db_field=None, name=None, required=False, default=None, |     def __init__( | ||||||
|                  unique=False, unique_with=None, primary_key=False, |         self, | ||||||
|                  validation=None, choices=None, null=False, sparse=False, |         db_field=None, | ||||||
|                  **kwargs): |         required=False, | ||||||
|  |         default=None, | ||||||
|  |         unique=False, | ||||||
|  |         unique_with=None, | ||||||
|  |         primary_key=False, | ||||||
|  |         validation=None, | ||||||
|  |         choices=None, | ||||||
|  |         null=False, | ||||||
|  |         sparse=False, | ||||||
|  |         **kwargs | ||||||
|  |     ): | ||||||
|         """ |         """ | ||||||
|         :param db_field: The database field to store this field in |         :param db_field: The database field to store this field in | ||||||
|             (defaults to the name of the field) |             (defaults to the name of the field) | ||||||
|         :param name: Deprecated - use db_field |  | ||||||
|         :param required: If the field is required. Whether it has to have a |         :param required: If the field is required. Whether it has to have a | ||||||
|             value or not. Defaults to False. |             value or not. Defaults to False. | ||||||
|         :param default: (optional) The default value for this field if no value |         :param default: (optional) The default value for this field if no value | ||||||
| @@ -65,11 +71,8 @@ class BaseField(object): | |||||||
|             existing attributes. Common metadata includes `verbose_name` and |             existing attributes. Common metadata includes `verbose_name` and | ||||||
|             `help_text`. |             `help_text`. | ||||||
|         """ |         """ | ||||||
|         self.db_field = (db_field or name) if not primary_key else '_id' |         self.db_field = db_field if not primary_key else "_id" | ||||||
|  |  | ||||||
|         if name: |  | ||||||
|             msg = 'Field\'s "name" attribute deprecated in favour of "db_field"' |  | ||||||
|             warnings.warn(msg, DeprecationWarning) |  | ||||||
|         self.required = required or primary_key |         self.required = required or primary_key | ||||||
|         self.default = default |         self.default = default | ||||||
|         self.unique = bool(unique or unique_with) |         self.unique = bool(unique or unique_with) | ||||||
| @@ -82,17 +85,14 @@ class BaseField(object): | |||||||
|         self._owner_document = None |         self._owner_document = None | ||||||
|  |  | ||||||
|         # Make sure db_field is a string (if it's explicitly defined). |         # Make sure db_field is a string (if it's explicitly defined). | ||||||
|         if ( |         if self.db_field is not None and not isinstance(self.db_field, str): | ||||||
|             self.db_field is not None and |             raise TypeError("db_field should be a string.") | ||||||
|             not isinstance(self.db_field, six.string_types) |  | ||||||
|         ): |  | ||||||
|             raise TypeError('db_field should be a string.') |  | ||||||
|  |  | ||||||
|         # Make sure db_field doesn't contain any forbidden characters. |         # Make sure db_field doesn't contain any forbidden characters. | ||||||
|         if isinstance(self.db_field, six.string_types) and ( |         if isinstance(self.db_field, str) and ( | ||||||
|             '.' in self.db_field or |             "." in self.db_field | ||||||
|             '\0' in self.db_field or |             or "\0" in self.db_field | ||||||
|             self.db_field.startswith('$') |             or self.db_field.startswith("$") | ||||||
|         ): |         ): | ||||||
|             raise ValueError( |             raise ValueError( | ||||||
|                 'field names cannot contain dots (".") or null characters ' |                 'field names cannot contain dots (".") or null characters ' | ||||||
| @@ -102,15 +102,17 @@ class BaseField(object): | |||||||
|         # Detect and report conflicts between metadata and base properties. |         # Detect and report conflicts between metadata and base properties. | ||||||
|         conflicts = set(dir(self)) & set(kwargs) |         conflicts = set(dir(self)) & set(kwargs) | ||||||
|         if conflicts: |         if conflicts: | ||||||
|             raise TypeError('%s already has attribute(s): %s' % ( |             raise TypeError( | ||||||
|                 self.__class__.__name__, ', '.join(conflicts))) |                 "%s already has attribute(s): %s" | ||||||
|  |                 % (self.__class__.__name__, ", ".join(conflicts)) | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         # Assign metadata to the instance |         # Assign metadata to the instance | ||||||
|         # This efficient method is available because no __slots__ are defined. |         # This efficient method is available because no __slots__ are defined. | ||||||
|         self.__dict__.update(kwargs) |         self.__dict__.update(kwargs) | ||||||
|  |  | ||||||
|         # Adjust the appropriate creation counter, and save our local copy. |         # Adjust the appropriate creation counter, and save our local copy. | ||||||
|         if self.db_field == '_id': |         if self.db_field == "_id": | ||||||
|             self.creation_counter = BaseField.auto_creation_counter |             self.creation_counter = BaseField.auto_creation_counter | ||||||
|             BaseField.auto_creation_counter -= 1 |             BaseField.auto_creation_counter -= 1 | ||||||
|         else: |         else: | ||||||
| @@ -128,10 +130,9 @@ class BaseField(object): | |||||||
|         return instance._data.get(self.name) |         return instance._data.get(self.name) | ||||||
|  |  | ||||||
|     def __set__(self, instance, value): |     def __set__(self, instance, value): | ||||||
|         """Descriptor for assigning a value to a field in a document. |         """Descriptor for assigning a value to a field in a document.""" | ||||||
|         """ |         # If setting to None and there is a default value provided for this | ||||||
|         # If setting to None and there is a default |         # field, then set the value to the default value. | ||||||
|         # Then set the value to the default value |  | ||||||
|         if value is None: |         if value is None: | ||||||
|             if self.null: |             if self.null: | ||||||
|                 value = None |                 value = None | ||||||
| @@ -142,24 +143,29 @@ class BaseField(object): | |||||||
|  |  | ||||||
|         if instance._initialised: |         if instance._initialised: | ||||||
|             try: |             try: | ||||||
|                 if (self.name not in instance._data or |                 value_has_changed = ( | ||||||
|                         instance._data[self.name] != value): |                     self.name not in instance._data | ||||||
|  |                     or instance._data[self.name] != value | ||||||
|  |                 ) | ||||||
|  |                 if value_has_changed: | ||||||
|                     instance._mark_as_changed(self.name) |                     instance._mark_as_changed(self.name) | ||||||
|             except Exception: |             except Exception: | ||||||
|                 # Values cant be compared eg: naive and tz datetimes |                 # Some values can't be compared and throw an error when we | ||||||
|                 # So mark it as changed |                 # attempt to do so (e.g. tz-naive and tz-aware datetimes). | ||||||
|  |                 # Mark the field as changed in such cases. | ||||||
|                 instance._mark_as_changed(self.name) |                 instance._mark_as_changed(self.name) | ||||||
|  |  | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||||
|         if isinstance(value, EmbeddedDocument): |         if isinstance(value, EmbeddedDocument): | ||||||
|             value._instance = weakref.proxy(instance) |             value._instance = weakref.proxy(instance) | ||||||
|         elif isinstance(value, (list, tuple)): |         elif isinstance(value, (list, tuple)): | ||||||
|             for v in value: |             for v in value: | ||||||
|                 if isinstance(v, EmbeddedDocument): |                 if isinstance(v, EmbeddedDocument): | ||||||
|                     v._instance = weakref.proxy(instance) |                     v._instance = weakref.proxy(instance) | ||||||
|  |  | ||||||
|         instance._data[self.name] = value |         instance._data[self.name] = value | ||||||
|  |  | ||||||
|     def error(self, message='', errors=None, field_name=None): |     def error(self, message="", errors=None, field_name=None): | ||||||
|         """Raise a ValidationError.""" |         """Raise a ValidationError.""" | ||||||
|         field_name = field_name if field_name else self.name |         field_name = field_name if field_name else self.name | ||||||
|         raise ValidationError(message, errors=errors, field_name=field_name) |         raise ValidationError(message, errors=errors, field_name=field_name) | ||||||
| @@ -176,11 +182,11 @@ class BaseField(object): | |||||||
|         """Helper method to call to_mongo with proper inputs.""" |         """Helper method to call to_mongo with proper inputs.""" | ||||||
|         f_inputs = self.to_mongo.__code__.co_varnames |         f_inputs = self.to_mongo.__code__.co_varnames | ||||||
|         ex_vars = {} |         ex_vars = {} | ||||||
|         if 'fields' in f_inputs: |         if "fields" in f_inputs: | ||||||
|             ex_vars['fields'] = fields |             ex_vars["fields"] = fields | ||||||
|  |  | ||||||
|         if 'use_db_field' in f_inputs: |         if "use_db_field" in f_inputs: | ||||||
|             ex_vars['use_db_field'] = use_db_field |             ex_vars["use_db_field"] = use_db_field | ||||||
|  |  | ||||||
|         return self.to_mongo(value, **ex_vars) |         return self.to_mongo(value, **ex_vars) | ||||||
|  |  | ||||||
| @@ -195,8 +201,8 @@ class BaseField(object): | |||||||
|         pass |         pass | ||||||
|  |  | ||||||
|     def _validate_choices(self, value): |     def _validate_choices(self, value): | ||||||
|         Document = _import_class('Document') |         Document = _import_class("Document") | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||||
|  |  | ||||||
|         choice_list = self.choices |         choice_list = self.choices | ||||||
|         if isinstance(next(iter(choice_list)), (list, tuple)): |         if isinstance(next(iter(choice_list)), (list, tuple)): | ||||||
| @@ -206,16 +212,12 @@ class BaseField(object): | |||||||
|         # Choices which are other types of Documents |         # Choices which are other types of Documents | ||||||
|         if isinstance(value, (Document, EmbeddedDocument)): |         if isinstance(value, (Document, EmbeddedDocument)): | ||||||
|             if not any(isinstance(value, c) for c in choice_list): |             if not any(isinstance(value, c) for c in choice_list): | ||||||
|                 self.error( |                 self.error("Value must be an instance of %s" % (choice_list)) | ||||||
|                     'Value must be an instance of %s' % ( |  | ||||||
|                         six.text_type(choice_list) |  | ||||||
|                     ) |  | ||||||
|                 ) |  | ||||||
|         # Choices which are types other than Documents |         # Choices which are types other than Documents | ||||||
|         else: |         else: | ||||||
|             values = value if isinstance(value, (list, tuple)) else [value] |             values = value if isinstance(value, (list, tuple)) else [value] | ||||||
|             if len(set(values) - set(choice_list)): |             if len(set(values) - set(choice_list)): | ||||||
|                 self.error('Value must be one of %s' % six.text_type(choice_list)) |                 self.error("Value must be one of %s" % str(choice_list)) | ||||||
|  |  | ||||||
|     def _validate(self, value, **kwargs): |     def _validate(self, value, **kwargs): | ||||||
|         # Check the Choices Constraint |         # Check the Choices Constraint | ||||||
| @@ -231,13 +233,17 @@ class BaseField(object): | |||||||
|                     # in favor of having validation raising a ValidationError |                     # in favor of having validation raising a ValidationError | ||||||
|                     ret = self.validation(value) |                     ret = self.validation(value) | ||||||
|                     if ret is not None: |                     if ret is not None: | ||||||
|                         raise DeprecatedError('validation argument for `%s` must not return anything, ' |                         raise DeprecatedError( | ||||||
|                                               'it should raise a ValidationError if validation fails' % self.name) |                             "validation argument for `%s` must not return anything, " | ||||||
|  |                             "it should raise a ValidationError if validation fails" | ||||||
|  |                             % self.name | ||||||
|  |                         ) | ||||||
|                 except ValidationError as ex: |                 except ValidationError as ex: | ||||||
|                     self.error(str(ex)) |                     self.error(str(ex)) | ||||||
|             else: |             else: | ||||||
|                 raise ValueError('validation argument for `"%s"` must be a ' |                 raise ValueError( | ||||||
|                                  'callable.' % self.name) |                     'validation argument for `"%s"` must be a ' "callable." % self.name | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|         self.validate(value, **kwargs) |         self.validate(value, **kwargs) | ||||||
|  |  | ||||||
| @@ -271,35 +277,41 @@ class ComplexBaseField(BaseField): | |||||||
|             # Document class being used rather than a document object |             # Document class being used rather than a document object | ||||||
|             return self |             return self | ||||||
|  |  | ||||||
|         ReferenceField = _import_class('ReferenceField') |         ReferenceField = _import_class("ReferenceField") | ||||||
|         GenericReferenceField = _import_class('GenericReferenceField') |         GenericReferenceField = _import_class("GenericReferenceField") | ||||||
|         EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') |         EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField") | ||||||
|  |  | ||||||
|         auto_dereference = instance._fields[self.name]._auto_dereference |         auto_dereference = instance._fields[self.name]._auto_dereference | ||||||
|  |  | ||||||
|         dereference = (auto_dereference and |         dereference = auto_dereference and ( | ||||||
|                        (self.field is None or isinstance(self.field, |             self.field is None | ||||||
|                                                          (GenericReferenceField, ReferenceField)))) |             or isinstance(self.field, (GenericReferenceField, ReferenceField)) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|         _dereference = _import_class('DeReference')() |         _dereference = _import_class("DeReference")() | ||||||
|  |  | ||||||
|         if (instance._initialised and |         if ( | ||||||
|                 dereference and |             instance._initialised | ||||||
|                 instance._data.get(self.name) and |             and dereference | ||||||
|                 not getattr(instance._data[self.name], '_dereferenced', False)): |             and instance._data.get(self.name) | ||||||
|  |             and not getattr(instance._data[self.name], "_dereferenced", False) | ||||||
|  |         ): | ||||||
|             instance._data[self.name] = _dereference( |             instance._data[self.name] = _dereference( | ||||||
|                 instance._data.get(self.name), max_depth=1, instance=instance, |                 instance._data.get(self.name), | ||||||
|                 name=self.name |                 max_depth=1, | ||||||
|  |                 instance=instance, | ||||||
|  |                 name=self.name, | ||||||
|             ) |             ) | ||||||
|             if hasattr(instance._data[self.name], '_dereferenced'): |             if hasattr(instance._data[self.name], "_dereferenced"): | ||||||
|                 instance._data[self.name]._dereferenced = True |                 instance._data[self.name]._dereferenced = True | ||||||
|  |  | ||||||
|         value = super(ComplexBaseField, self).__get__(instance, owner) |         value = super().__get__(instance, owner) | ||||||
|  |  | ||||||
|         # Convert lists / values so we can watch for any changes on them |         # Convert lists / values so we can watch for any changes on them | ||||||
|         if isinstance(value, (list, tuple)): |         if isinstance(value, (list, tuple)): | ||||||
|             if (issubclass(type(self), EmbeddedDocumentListField) and |             if issubclass(type(self), EmbeddedDocumentListField) and not isinstance( | ||||||
|                     not isinstance(value, EmbeddedDocumentList)): |                 value, EmbeddedDocumentList | ||||||
|  |             ): | ||||||
|                 value = EmbeddedDocumentList(value, instance, self.name) |                 value = EmbeddedDocumentList(value, instance, self.name) | ||||||
|             elif not isinstance(value, BaseList): |             elif not isinstance(value, BaseList): | ||||||
|                 value = BaseList(value, instance, self.name) |                 value = BaseList(value, instance, self.name) | ||||||
| @@ -308,12 +320,13 @@ class ComplexBaseField(BaseField): | |||||||
|             value = BaseDict(value, instance, self.name) |             value = BaseDict(value, instance, self.name) | ||||||
|             instance._data[self.name] = value |             instance._data[self.name] = value | ||||||
|  |  | ||||||
|         if (auto_dereference and instance._initialised and |         if ( | ||||||
|                 isinstance(value, (BaseList, BaseDict)) and |             auto_dereference | ||||||
|                 not value._dereferenced): |             and instance._initialised | ||||||
|             value = _dereference( |             and isinstance(value, (BaseList, BaseDict)) | ||||||
|                 value, max_depth=1, instance=instance, name=self.name |             and not value._dereferenced | ||||||
|             ) |         ): | ||||||
|  |             value = _dereference(value, max_depth=1, instance=instance, name=self.name) | ||||||
|             value._dereferenced = True |             value._dereferenced = True | ||||||
|             instance._data[self.name] = value |             instance._data[self.name] = value | ||||||
|  |  | ||||||
| @@ -321,19 +334,19 @@ class ComplexBaseField(BaseField): | |||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         """Convert a MongoDB-compatible type to a Python type.""" |         """Convert a MongoDB-compatible type to a Python type.""" | ||||||
|         if isinstance(value, six.string_types): |         if isinstance(value, str): | ||||||
|             return value |             return value | ||||||
|  |  | ||||||
|         if hasattr(value, 'to_python'): |         if hasattr(value, "to_python"): | ||||||
|             return value.to_python() |             return value.to_python() | ||||||
|  |  | ||||||
|         BaseDocument = _import_class('BaseDocument') |         BaseDocument = _import_class("BaseDocument") | ||||||
|         if isinstance(value, BaseDocument): |         if isinstance(value, BaseDocument): | ||||||
|             # Something is wrong, return the value as it is |             # Something is wrong, return the value as it is | ||||||
|             return value |             return value | ||||||
|  |  | ||||||
|         is_list = False |         is_list = False | ||||||
|         if not hasattr(value, 'items'): |         if not hasattr(value, "items"): | ||||||
|             try: |             try: | ||||||
|                 is_list = True |                 is_list = True | ||||||
|                 value = {idx: v for idx, v in enumerate(value)} |                 value = {idx: v for idx, v in enumerate(value)} | ||||||
| @@ -342,50 +355,54 @@ class ComplexBaseField(BaseField): | |||||||
|  |  | ||||||
|         if self.field: |         if self.field: | ||||||
|             self.field._auto_dereference = self._auto_dereference |             self.field._auto_dereference = self._auto_dereference | ||||||
|             value_dict = {key: self.field.to_python(item) |             value_dict = { | ||||||
|                           for key, item in value.items()} |                 key: self.field.to_python(item) for key, item in value.items() | ||||||
|  |             } | ||||||
|         else: |         else: | ||||||
|             Document = _import_class('Document') |             Document = _import_class("Document") | ||||||
|             value_dict = {} |             value_dict = {} | ||||||
|             for k, v in value.items(): |             for k, v in value.items(): | ||||||
|                 if isinstance(v, Document): |                 if isinstance(v, Document): | ||||||
|                     # We need the id from the saved object to create the DBRef |                     # We need the id from the saved object to create the DBRef | ||||||
|                     if v.pk is None: |                     if v.pk is None: | ||||||
|                         self.error('You can only reference documents once they' |                         self.error( | ||||||
|                                    ' have been saved to the database') |                             "You can only reference documents once they" | ||||||
|  |                             " have been saved to the database" | ||||||
|  |                         ) | ||||||
|                     collection = v._get_collection_name() |                     collection = v._get_collection_name() | ||||||
|                     value_dict[k] = DBRef(collection, v.pk) |                     value_dict[k] = DBRef(collection, v.pk) | ||||||
|                 elif hasattr(v, 'to_python'): |                 elif hasattr(v, "to_python"): | ||||||
|                     value_dict[k] = v.to_python() |                     value_dict[k] = v.to_python() | ||||||
|                 else: |                 else: | ||||||
|                     value_dict[k] = self.to_python(v) |                     value_dict[k] = self.to_python(v) | ||||||
|  |  | ||||||
|         if is_list:  # Convert back to a list |         if is_list:  # Convert back to a list | ||||||
|             return [v for _, v in sorted(value_dict.items(), |             return [ | ||||||
|                                          key=operator.itemgetter(0))] |                 v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0)) | ||||||
|  |             ] | ||||||
|         return value_dict |         return value_dict | ||||||
|  |  | ||||||
|     def to_mongo(self, value, use_db_field=True, fields=None): |     def to_mongo(self, value, use_db_field=True, fields=None): | ||||||
|         """Convert a Python type to a MongoDB-compatible type.""" |         """Convert a Python type to a MongoDB-compatible type.""" | ||||||
|         Document = _import_class('Document') |         Document = _import_class("Document") | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||||
|         GenericReferenceField = _import_class('GenericReferenceField') |         GenericReferenceField = _import_class("GenericReferenceField") | ||||||
|  |  | ||||||
|         if isinstance(value, six.string_types): |         if isinstance(value, str): | ||||||
|             return value |             return value | ||||||
|  |  | ||||||
|         if hasattr(value, 'to_mongo'): |         if hasattr(value, "to_mongo"): | ||||||
|             if isinstance(value, Document): |             if isinstance(value, Document): | ||||||
|                 return GenericReferenceField().to_mongo(value) |                 return GenericReferenceField().to_mongo(value) | ||||||
|             cls = value.__class__ |             cls = value.__class__ | ||||||
|             val = value.to_mongo(use_db_field, fields) |             val = value.to_mongo(use_db_field, fields) | ||||||
|             # If it's a document that is not inherited add _cls |             # If it's a document that is not inherited add _cls | ||||||
|             if isinstance(value, EmbeddedDocument): |             if isinstance(value, EmbeddedDocument): | ||||||
|                 val['_cls'] = cls.__name__ |                 val["_cls"] = cls.__name__ | ||||||
|             return val |             return val | ||||||
|  |  | ||||||
|         is_list = False |         is_list = False | ||||||
|         if not hasattr(value, 'items'): |         if not hasattr(value, "items"): | ||||||
|             try: |             try: | ||||||
|                 is_list = True |                 is_list = True | ||||||
|                 value = {k: v for k, v in enumerate(value)} |                 value = {k: v for k, v in enumerate(value)} | ||||||
| @@ -395,48 +412,51 @@ class ComplexBaseField(BaseField): | |||||||
|         if self.field: |         if self.field: | ||||||
|             value_dict = { |             value_dict = { | ||||||
|                 key: self.field._to_mongo_safe_call(item, use_db_field, fields) |                 key: self.field._to_mongo_safe_call(item, use_db_field, fields) | ||||||
|                 for key, item in iteritems(value) |                 for key, item in value.items() | ||||||
|             } |             } | ||||||
|         else: |         else: | ||||||
|             value_dict = {} |             value_dict = {} | ||||||
|             for k, v in iteritems(value): |             for k, v in value.items(): | ||||||
|                 if isinstance(v, Document): |                 if isinstance(v, Document): | ||||||
|                     # We need the id from the saved object to create the DBRef |                     # We need the id from the saved object to create the DBRef | ||||||
|                     if v.pk is None: |                     if v.pk is None: | ||||||
|                         self.error('You can only reference documents once they' |                         self.error( | ||||||
|                                    ' have been saved to the database') |                             "You can only reference documents once they" | ||||||
|  |                             " have been saved to the database" | ||||||
|  |                         ) | ||||||
|  |  | ||||||
|                     # If its a document that is not inheritable it won't have |                     # If its a document that is not inheritable it won't have | ||||||
|                     # any _cls data so make it a generic reference allows |                     # any _cls data so make it a generic reference allows | ||||||
|                     # us to dereference |                     # us to dereference | ||||||
|                     meta = getattr(v, '_meta', {}) |                     meta = getattr(v, "_meta", {}) | ||||||
|                     allow_inheritance = meta.get('allow_inheritance') |                     allow_inheritance = meta.get("allow_inheritance") | ||||||
|                     if not allow_inheritance and not self.field: |                     if not allow_inheritance and not self.field: | ||||||
|                         value_dict[k] = GenericReferenceField().to_mongo(v) |                         value_dict[k] = GenericReferenceField().to_mongo(v) | ||||||
|                     else: |                     else: | ||||||
|                         collection = v._get_collection_name() |                         collection = v._get_collection_name() | ||||||
|                         value_dict[k] = DBRef(collection, v.pk) |                         value_dict[k] = DBRef(collection, v.pk) | ||||||
|                 elif hasattr(v, 'to_mongo'): |                 elif hasattr(v, "to_mongo"): | ||||||
|                     cls = v.__class__ |                     cls = v.__class__ | ||||||
|                     val = v.to_mongo(use_db_field, fields) |                     val = v.to_mongo(use_db_field, fields) | ||||||
|                     # If it's a document that is not inherited add _cls |                     # If it's a document that is not inherited add _cls | ||||||
|                     if isinstance(v, (Document, EmbeddedDocument)): |                     if isinstance(v, (Document, EmbeddedDocument)): | ||||||
|                         val['_cls'] = cls.__name__ |                         val["_cls"] = cls.__name__ | ||||||
|                     value_dict[k] = val |                     value_dict[k] = val | ||||||
|                 else: |                 else: | ||||||
|                     value_dict[k] = self.to_mongo(v, use_db_field, fields) |                     value_dict[k] = self.to_mongo(v, use_db_field, fields) | ||||||
|  |  | ||||||
|         if is_list:  # Convert back to a list |         if is_list:  # Convert back to a list | ||||||
|             return [v for _, v in sorted(value_dict.items(), |             return [ | ||||||
|                                          key=operator.itemgetter(0))] |                 v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0)) | ||||||
|  |             ] | ||||||
|         return value_dict |         return value_dict | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         """If field is provided ensure the value is valid.""" |         """If field is provided ensure the value is valid.""" | ||||||
|         errors = {} |         errors = {} | ||||||
|         if self.field: |         if self.field: | ||||||
|             if hasattr(value, 'iteritems') or hasattr(value, 'items'): |             if hasattr(value, "items"): | ||||||
|                 sequence = iteritems(value) |                 sequence = value.items() | ||||||
|             else: |             else: | ||||||
|                 sequence = enumerate(value) |                 sequence = enumerate(value) | ||||||
|             for k, v in sequence: |             for k, v in sequence: | ||||||
| @@ -449,11 +469,12 @@ class ComplexBaseField(BaseField): | |||||||
|  |  | ||||||
|             if errors: |             if errors: | ||||||
|                 field_class = self.field.__class__.__name__ |                 field_class = self.field.__class__.__name__ | ||||||
|                 self.error('Invalid %s item (%s)' % (field_class, value), |                 self.error( | ||||||
|                            errors=errors) |                     "Invalid {} item ({})".format(field_class, value), errors=errors | ||||||
|  |                 ) | ||||||
|         # Don't allow empty values if required |         # Don't allow empty values if required | ||||||
|         if self.required and not value: |         if self.required and not value: | ||||||
|             self.error('Field is required and cannot be empty') |             self.error("Field is required and cannot be empty") | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
|         return self.to_mongo(value) |         return self.to_mongo(value) | ||||||
| @@ -483,10 +504,9 @@ class ObjectIdField(BaseField): | |||||||
|     def to_mongo(self, value): |     def to_mongo(self, value): | ||||||
|         if not isinstance(value, ObjectId): |         if not isinstance(value, ObjectId): | ||||||
|             try: |             try: | ||||||
|                 return ObjectId(six.text_type(value)) |                 return ObjectId(str(value)) | ||||||
|             except Exception as e: |             except Exception as e: | ||||||
|                 # e.message attribute has been deprecated since Python 2.6 |                 self.error(str(e)) | ||||||
|                 self.error(six.text_type(e)) |  | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
| @@ -494,9 +514,9 @@ class ObjectIdField(BaseField): | |||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         try: |         try: | ||||||
|             ObjectId(six.text_type(value)) |             ObjectId(str(value)) | ||||||
|         except Exception: |         except Exception: | ||||||
|             self.error('Invalid Object ID') |             self.error("Invalid ObjectID") | ||||||
|  |  | ||||||
|  |  | ||||||
| class GeoJsonBaseField(BaseField): | class GeoJsonBaseField(BaseField): | ||||||
| @@ -506,72 +526,73 @@ class GeoJsonBaseField(BaseField): | |||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     _geo_index = pymongo.GEOSPHERE |     _geo_index = pymongo.GEOSPHERE | ||||||
|     _type = 'GeoBase' |     _type = "GeoBase" | ||||||
|  |  | ||||||
|     def __init__(self, auto_index=True, *args, **kwargs): |     def __init__(self, auto_index=True, *args, **kwargs): | ||||||
|         """ |         """ | ||||||
|         :param bool auto_index: Automatically create a '2dsphere' index.\ |         :param bool auto_index: Automatically create a '2dsphere' index.\ | ||||||
|             Defaults to `True`. |             Defaults to `True`. | ||||||
|         """ |         """ | ||||||
|         self._name = '%sField' % self._type |         self._name = "%sField" % self._type | ||||||
|         if not auto_index: |         if not auto_index: | ||||||
|             self._geo_index = False |             self._geo_index = False | ||||||
|         super(GeoJsonBaseField, self).__init__(*args, **kwargs) |         super().__init__(*args, **kwargs) | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         """Validate the GeoJson object based on its type.""" |         """Validate the GeoJson object based on its type.""" | ||||||
|         if isinstance(value, dict): |         if isinstance(value, dict): | ||||||
|             if set(value.keys()) == {'type', 'coordinates'}: |             if set(value.keys()) == {"type", "coordinates"}: | ||||||
|                 if value['type'] != self._type: |                 if value["type"] != self._type: | ||||||
|                     self.error('%s type must be "%s"' % |                     self.error('{} type must be "{}"'.format(self._name, self._type)) | ||||||
|                                (self._name, self._type)) |                 return self.validate(value["coordinates"]) | ||||||
|                 return self.validate(value['coordinates']) |  | ||||||
|             else: |             else: | ||||||
|                 self.error('%s can only accept a valid GeoJson dictionary' |                 self.error( | ||||||
|                            ' or lists of (x, y)' % self._name) |                     "%s can only accept a valid GeoJson dictionary" | ||||||
|  |                     " or lists of (x, y)" % self._name | ||||||
|  |                 ) | ||||||
|                 return |                 return | ||||||
|         elif not isinstance(value, (list, tuple)): |         elif not isinstance(value, (list, tuple)): | ||||||
|             self.error('%s can only accept lists of [x, y]' % self._name) |             self.error("%s can only accept lists of [x, y]" % self._name) | ||||||
|             return |             return | ||||||
|  |  | ||||||
|         validate = getattr(self, '_validate_%s' % self._type.lower()) |         validate = getattr(self, "_validate_%s" % self._type.lower()) | ||||||
|         error = validate(value) |         error = validate(value) | ||||||
|         if error: |         if error: | ||||||
|             self.error(error) |             self.error(error) | ||||||
|  |  | ||||||
|     def _validate_polygon(self, value, top_level=True): |     def _validate_polygon(self, value, top_level=True): | ||||||
|         if not isinstance(value, (list, tuple)): |         if not isinstance(value, (list, tuple)): | ||||||
|             return 'Polygons must contain list of linestrings' |             return "Polygons must contain list of linestrings" | ||||||
|  |  | ||||||
|         # Quick and dirty validator |         # Quick and dirty validator | ||||||
|         try: |         try: | ||||||
|             value[0][0][0] |             value[0][0][0] | ||||||
|         except (TypeError, IndexError): |         except (TypeError, IndexError): | ||||||
|             return 'Invalid Polygon must contain at least one valid linestring' |             return "Invalid Polygon must contain at least one valid linestring" | ||||||
|  |  | ||||||
|         errors = [] |         errors = [] | ||||||
|         for val in value: |         for val in value: | ||||||
|             error = self._validate_linestring(val, False) |             error = self._validate_linestring(val, False) | ||||||
|             if not error and val[0] != val[-1]: |             if not error and val[0] != val[-1]: | ||||||
|                 error = 'LineStrings must start and end at the same point' |                 error = "LineStrings must start and end at the same point" | ||||||
|             if error and error not in errors: |             if error and error not in errors: | ||||||
|                 errors.append(error) |                 errors.append(error) | ||||||
|         if errors: |         if errors: | ||||||
|             if top_level: |             if top_level: | ||||||
|                 return 'Invalid Polygon:\n%s' % ', '.join(errors) |                 return "Invalid Polygon:\n%s" % ", ".join(errors) | ||||||
|             else: |             else: | ||||||
|                 return '%s' % ', '.join(errors) |                 return "%s" % ", ".join(errors) | ||||||
|  |  | ||||||
|     def _validate_linestring(self, value, top_level=True): |     def _validate_linestring(self, value, top_level=True): | ||||||
|         """Validate a linestring.""" |         """Validate a linestring.""" | ||||||
|         if not isinstance(value, (list, tuple)): |         if not isinstance(value, (list, tuple)): | ||||||
|             return 'LineStrings must contain list of coordinate pairs' |             return "LineStrings must contain list of coordinate pairs" | ||||||
|  |  | ||||||
|         # Quick and dirty validator |         # Quick and dirty validator | ||||||
|         try: |         try: | ||||||
|             value[0][0] |             value[0][0] | ||||||
|         except (TypeError, IndexError): |         except (TypeError, IndexError): | ||||||
|             return 'Invalid LineString must contain at least one valid point' |             return "Invalid LineString must contain at least one valid point" | ||||||
|  |  | ||||||
|         errors = [] |         errors = [] | ||||||
|         for val in value: |         for val in value: | ||||||
| @@ -580,29 +601,30 @@ class GeoJsonBaseField(BaseField): | |||||||
|                 errors.append(error) |                 errors.append(error) | ||||||
|         if errors: |         if errors: | ||||||
|             if top_level: |             if top_level: | ||||||
|                 return 'Invalid LineString:\n%s' % ', '.join(errors) |                 return "Invalid LineString:\n%s" % ", ".join(errors) | ||||||
|             else: |             else: | ||||||
|                 return '%s' % ', '.join(errors) |                 return "%s" % ", ".join(errors) | ||||||
|  |  | ||||||
|     def _validate_point(self, value): |     def _validate_point(self, value): | ||||||
|         """Validate each set of coords""" |         """Validate each set of coords""" | ||||||
|         if not isinstance(value, (list, tuple)): |         if not isinstance(value, (list, tuple)): | ||||||
|             return 'Points must be a list of coordinate pairs' |             return "Points must be a list of coordinate pairs" | ||||||
|         elif not len(value) == 2: |         elif not len(value) == 2: | ||||||
|             return 'Value (%s) must be a two-dimensional point' % repr(value) |             return "Value (%s) must be a two-dimensional point" % repr(value) | ||||||
|         elif (not isinstance(value[0], (float, int)) or |         elif not isinstance(value[0], (float, int)) or not isinstance( | ||||||
|               not isinstance(value[1], (float, int))): |             value[1], (float, int) | ||||||
|             return 'Both values (%s) in point must be float or int' % repr(value) |         ): | ||||||
|  |             return "Both values (%s) in point must be float or int" % repr(value) | ||||||
|  |  | ||||||
|     def _validate_multipoint(self, value): |     def _validate_multipoint(self, value): | ||||||
|         if not isinstance(value, (list, tuple)): |         if not isinstance(value, (list, tuple)): | ||||||
|             return 'MultiPoint must be a list of Point' |             return "MultiPoint must be a list of Point" | ||||||
|  |  | ||||||
|         # Quick and dirty validator |         # Quick and dirty validator | ||||||
|         try: |         try: | ||||||
|             value[0][0] |             value[0][0] | ||||||
|         except (TypeError, IndexError): |         except (TypeError, IndexError): | ||||||
|             return 'Invalid MultiPoint must contain at least one valid point' |             return "Invalid MultiPoint must contain at least one valid point" | ||||||
|  |  | ||||||
|         errors = [] |         errors = [] | ||||||
|         for point in value: |         for point in value: | ||||||
| @@ -611,17 +633,17 @@ class GeoJsonBaseField(BaseField): | |||||||
|                 errors.append(error) |                 errors.append(error) | ||||||
|  |  | ||||||
|         if errors: |         if errors: | ||||||
|             return '%s' % ', '.join(errors) |             return "%s" % ", ".join(errors) | ||||||
|  |  | ||||||
|     def _validate_multilinestring(self, value, top_level=True): |     def _validate_multilinestring(self, value, top_level=True): | ||||||
|         if not isinstance(value, (list, tuple)): |         if not isinstance(value, (list, tuple)): | ||||||
|             return 'MultiLineString must be a list of LineString' |             return "MultiLineString must be a list of LineString" | ||||||
|  |  | ||||||
|         # Quick and dirty validator |         # Quick and dirty validator | ||||||
|         try: |         try: | ||||||
|             value[0][0][0] |             value[0][0][0] | ||||||
|         except (TypeError, IndexError): |         except (TypeError, IndexError): | ||||||
|             return 'Invalid MultiLineString must contain at least one valid linestring' |             return "Invalid MultiLineString must contain at least one valid linestring" | ||||||
|  |  | ||||||
|         errors = [] |         errors = [] | ||||||
|         for linestring in value: |         for linestring in value: | ||||||
| @@ -631,19 +653,19 @@ class GeoJsonBaseField(BaseField): | |||||||
|  |  | ||||||
|         if errors: |         if errors: | ||||||
|             if top_level: |             if top_level: | ||||||
|                 return 'Invalid MultiLineString:\n%s' % ', '.join(errors) |                 return "Invalid MultiLineString:\n%s" % ", ".join(errors) | ||||||
|             else: |             else: | ||||||
|                 return '%s' % ', '.join(errors) |                 return "%s" % ", ".join(errors) | ||||||
|  |  | ||||||
|     def _validate_multipolygon(self, value): |     def _validate_multipolygon(self, value): | ||||||
|         if not isinstance(value, (list, tuple)): |         if not isinstance(value, (list, tuple)): | ||||||
|             return 'MultiPolygon must be a list of Polygon' |             return "MultiPolygon must be a list of Polygon" | ||||||
|  |  | ||||||
|         # Quick and dirty validator |         # Quick and dirty validator | ||||||
|         try: |         try: | ||||||
|             value[0][0][0][0] |             value[0][0][0][0] | ||||||
|         except (TypeError, IndexError): |         except (TypeError, IndexError): | ||||||
|             return 'Invalid MultiPolygon must contain at least one valid Polygon' |             return "Invalid MultiPolygon must contain at least one valid Polygon" | ||||||
|  |  | ||||||
|         errors = [] |         errors = [] | ||||||
|         for polygon in value: |         for polygon in value: | ||||||
| @@ -652,9 +674,9 @@ class GeoJsonBaseField(BaseField): | |||||||
|                 errors.append(error) |                 errors.append(error) | ||||||
|  |  | ||||||
|         if errors: |         if errors: | ||||||
|             return 'Invalid MultiPolygon:\n%s' % ', '.join(errors) |             return "Invalid MultiPolygon:\n%s" % ", ".join(errors) | ||||||
|  |  | ||||||
|     def to_mongo(self, value): |     def to_mongo(self, value): | ||||||
|         if isinstance(value, dict): |         if isinstance(value, dict): | ||||||
|             return value |             return value | ||||||
|         return SON([('type', self._type), ('coordinates', value)]) |         return SON([("type", self._type), ("coordinates", value)]) | ||||||
|   | |||||||
| @@ -1,18 +1,19 @@ | |||||||
|  | import itertools | ||||||
| import warnings | import warnings | ||||||
|  |  | ||||||
| import six |  | ||||||
| from six import iteritems, itervalues |  | ||||||
|  |  | ||||||
| from mongoengine.base.common import _document_registry | from mongoengine.base.common import _document_registry | ||||||
| from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField | from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.errors import InvalidDocumentError | from mongoengine.errors import InvalidDocumentError | ||||||
| from mongoengine.queryset import (DO_NOTHING, DoesNotExist, | from mongoengine.queryset import ( | ||||||
|                                   MultipleObjectsReturned, |     DO_NOTHING, | ||||||
|                                   QuerySetManager) |     DoesNotExist, | ||||||
|  |     MultipleObjectsReturned, | ||||||
|  |     QuerySetManager, | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass') | __all__ = ("DocumentMetaclass", "TopLevelDocumentMetaclass") | ||||||
|  |  | ||||||
|  |  | ||||||
| class DocumentMetaclass(type): | class DocumentMetaclass(type): | ||||||
| @@ -21,49 +22,51 @@ class DocumentMetaclass(type): | |||||||
|     # TODO lower complexity of this method |     # TODO lower complexity of this method | ||||||
|     def __new__(mcs, name, bases, attrs): |     def __new__(mcs, name, bases, attrs): | ||||||
|         flattened_bases = mcs._get_bases(bases) |         flattened_bases = mcs._get_bases(bases) | ||||||
|         super_new = super(DocumentMetaclass, mcs).__new__ |         super_new = super().__new__ | ||||||
|  |  | ||||||
|         # If a base class just call super |         # If a base class just call super | ||||||
|         metaclass = attrs.get('my_metaclass') |         metaclass = attrs.get("my_metaclass") | ||||||
|         if metaclass and issubclass(metaclass, DocumentMetaclass): |         if metaclass and issubclass(metaclass, DocumentMetaclass): | ||||||
|             return super_new(mcs, name, bases, attrs) |             return super_new(mcs, name, bases, attrs) | ||||||
|  |  | ||||||
|         attrs['_is_document'] = attrs.get('_is_document', False) |         attrs["_is_document"] = attrs.get("_is_document", False) | ||||||
|         attrs['_cached_reference_fields'] = [] |         attrs["_cached_reference_fields"] = [] | ||||||
|  |  | ||||||
|         # EmbeddedDocuments could have meta data for inheritance |         # EmbeddedDocuments could have meta data for inheritance | ||||||
|         if 'meta' in attrs: |         if "meta" in attrs: | ||||||
|             attrs['_meta'] = attrs.pop('meta') |             attrs["_meta"] = attrs.pop("meta") | ||||||
|  |  | ||||||
|         # EmbeddedDocuments should inherit meta data |         # EmbeddedDocuments should inherit meta data | ||||||
|         if '_meta' not in attrs: |         if "_meta" not in attrs: | ||||||
|             meta = MetaDict() |             meta = MetaDict() | ||||||
|             for base in flattened_bases[::-1]: |             for base in flattened_bases[::-1]: | ||||||
|                 # Add any mixin metadata from plain objects |                 # Add any mixin metadata from plain objects | ||||||
|                 if hasattr(base, 'meta'): |                 if hasattr(base, "meta"): | ||||||
|                     meta.merge(base.meta) |                     meta.merge(base.meta) | ||||||
|                 elif hasattr(base, '_meta'): |                 elif hasattr(base, "_meta"): | ||||||
|                     meta.merge(base._meta) |                     meta.merge(base._meta) | ||||||
|             attrs['_meta'] = meta |             attrs["_meta"] = meta | ||||||
|             attrs['_meta']['abstract'] = False  # 789: EmbeddedDocument shouldn't inherit abstract |             attrs["_meta"][ | ||||||
|  |                 "abstract" | ||||||
|  |             ] = False  # 789: EmbeddedDocument shouldn't inherit abstract | ||||||
|  |  | ||||||
|         # If allow_inheritance is True, add a "_cls" string field to the attrs |         # If allow_inheritance is True, add a "_cls" string field to the attrs | ||||||
|         if attrs['_meta'].get('allow_inheritance'): |         if attrs["_meta"].get("allow_inheritance"): | ||||||
|             StringField = _import_class('StringField') |             StringField = _import_class("StringField") | ||||||
|             attrs['_cls'] = StringField() |             attrs["_cls"] = StringField() | ||||||
|  |  | ||||||
|         # Handle document Fields |         # Handle document Fields | ||||||
|  |  | ||||||
|         # Merge all fields from subclasses |         # Merge all fields from subclasses | ||||||
|         doc_fields = {} |         doc_fields = {} | ||||||
|         for base in flattened_bases[::-1]: |         for base in flattened_bases[::-1]: | ||||||
|             if hasattr(base, '_fields'): |             if hasattr(base, "_fields"): | ||||||
|                 doc_fields.update(base._fields) |                 doc_fields.update(base._fields) | ||||||
|  |  | ||||||
|             # Standard object mixin - merge in any Fields |             # Standard object mixin - merge in any Fields | ||||||
|             if not hasattr(base, '_meta'): |             if not hasattr(base, "_meta"): | ||||||
|                 base_fields = {} |                 base_fields = {} | ||||||
|                 for attr_name, attr_value in iteritems(base.__dict__): |                 for attr_name, attr_value in base.__dict__.items(): | ||||||
|                     if not isinstance(attr_value, BaseField): |                     if not isinstance(attr_value, BaseField): | ||||||
|                         continue |                         continue | ||||||
|                     attr_value.name = attr_name |                     attr_value.name = attr_name | ||||||
| @@ -75,7 +78,7 @@ class DocumentMetaclass(type): | |||||||
|  |  | ||||||
|         # Discover any document fields |         # Discover any document fields | ||||||
|         field_names = {} |         field_names = {} | ||||||
|         for attr_name, attr_value in iteritems(attrs): |         for attr_name, attr_value in attrs.items(): | ||||||
|             if not isinstance(attr_value, BaseField): |             if not isinstance(attr_value, BaseField): | ||||||
|                 continue |                 continue | ||||||
|             attr_value.name = attr_name |             attr_value.name = attr_name | ||||||
| @@ -84,27 +87,29 @@ class DocumentMetaclass(type): | |||||||
|             doc_fields[attr_name] = attr_value |             doc_fields[attr_name] = attr_value | ||||||
|  |  | ||||||
|             # Count names to ensure no db_field redefinitions |             # Count names to ensure no db_field redefinitions | ||||||
|             field_names[attr_value.db_field] = field_names.get( |             field_names[attr_value.db_field] = ( | ||||||
|                 attr_value.db_field, 0) + 1 |                 field_names.get(attr_value.db_field, 0) + 1 | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         # Ensure no duplicate db_fields |         # Ensure no duplicate db_fields | ||||||
|         duplicate_db_fields = [k for k, v in field_names.items() if v > 1] |         duplicate_db_fields = [k for k, v in field_names.items() if v > 1] | ||||||
|         if duplicate_db_fields: |         if duplicate_db_fields: | ||||||
|             msg = ('Multiple db_fields defined for: %s ' % |             msg = "Multiple db_fields defined for: %s " % ", ".join(duplicate_db_fields) | ||||||
|                    ', '.join(duplicate_db_fields)) |  | ||||||
|             raise InvalidDocumentError(msg) |             raise InvalidDocumentError(msg) | ||||||
|  |  | ||||||
|         # Set _fields and db_field maps |         # Set _fields and db_field maps | ||||||
|         attrs['_fields'] = doc_fields |         attrs["_fields"] = doc_fields | ||||||
|         attrs['_db_field_map'] = {k: getattr(v, 'db_field', k) |         attrs["_db_field_map"] = { | ||||||
|                                   for k, v in doc_fields.items()} |             k: getattr(v, "db_field", k) for k, v in doc_fields.items() | ||||||
|         attrs['_reverse_db_field_map'] = { |         } | ||||||
|             v: k for k, v in attrs['_db_field_map'].items() |         attrs["_reverse_db_field_map"] = { | ||||||
|  |             v: k for k, v in attrs["_db_field_map"].items() | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         attrs['_fields_ordered'] = tuple(i[1] for i in sorted( |         attrs["_fields_ordered"] = tuple( | ||||||
|                                          (v.creation_counter, v.name) |             i[1] | ||||||
|                                          for v in itervalues(doc_fields))) |             for i in sorted((v.creation_counter, v.name) for v in doc_fields.values()) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|         # |         # | ||||||
|         # Set document hierarchy |         # Set document hierarchy | ||||||
| @@ -112,32 +117,34 @@ class DocumentMetaclass(type): | |||||||
|         superclasses = () |         superclasses = () | ||||||
|         class_name = [name] |         class_name = [name] | ||||||
|         for base in flattened_bases: |         for base in flattened_bases: | ||||||
|             if (not getattr(base, '_is_base_cls', True) and |             if not getattr(base, "_is_base_cls", True) and not getattr( | ||||||
|                     not getattr(base, '_meta', {}).get('abstract', True)): |                 base, "_meta", {} | ||||||
|  |             ).get("abstract", True): | ||||||
|                 # Collate hierarchy for _cls and _subclasses |                 # Collate hierarchy for _cls and _subclasses | ||||||
|                 class_name.append(base.__name__) |                 class_name.append(base.__name__) | ||||||
|  |  | ||||||
|             if hasattr(base, '_meta'): |             if hasattr(base, "_meta"): | ||||||
|                 # Warn if allow_inheritance isn't set and prevent |                 # Warn if allow_inheritance isn't set and prevent | ||||||
|                 # inheritance of classes where inheritance is set to False |                 # inheritance of classes where inheritance is set to False | ||||||
|                 allow_inheritance = base._meta.get('allow_inheritance') |                 allow_inheritance = base._meta.get("allow_inheritance") | ||||||
|                 if not allow_inheritance and not base._meta.get('abstract'): |                 if not allow_inheritance and not base._meta.get("abstract"): | ||||||
|                     raise ValueError('Document %s may not be subclassed. ' |                     raise ValueError( | ||||||
|                                      'To enable inheritance, use the "allow_inheritance" meta attribute.' % |                         "Document %s may not be subclassed. " | ||||||
|                                      base.__name__) |                         'To enable inheritance, use the "allow_inheritance" meta attribute.' | ||||||
|  |                         % base.__name__ | ||||||
|  |                     ) | ||||||
|  |  | ||||||
|         # Get superclasses from last base superclass |         # Get superclasses from last base superclass | ||||||
|         document_bases = [b for b in flattened_bases |         document_bases = [b for b in flattened_bases if hasattr(b, "_class_name")] | ||||||
|                           if hasattr(b, '_class_name')] |  | ||||||
|         if document_bases: |         if document_bases: | ||||||
|             superclasses = document_bases[0]._superclasses |             superclasses = document_bases[0]._superclasses | ||||||
|             superclasses += (document_bases[0]._class_name, ) |             superclasses += (document_bases[0]._class_name,) | ||||||
|  |  | ||||||
|         _cls = '.'.join(reversed(class_name)) |         _cls = ".".join(reversed(class_name)) | ||||||
|         attrs['_class_name'] = _cls |         attrs["_class_name"] = _cls | ||||||
|         attrs['_superclasses'] = superclasses |         attrs["_superclasses"] = superclasses | ||||||
|         attrs['_subclasses'] = (_cls, ) |         attrs["_subclasses"] = (_cls,) | ||||||
|         attrs['_types'] = attrs['_subclasses']  # TODO depreciate _types |         attrs["_types"] = attrs["_subclasses"]  # TODO depreciate _types | ||||||
|  |  | ||||||
|         # Create the new_class |         # Create the new_class | ||||||
|         new_class = super_new(mcs, name, bases, attrs) |         new_class = super_new(mcs, name, bases, attrs) | ||||||
| @@ -148,8 +155,12 @@ class DocumentMetaclass(type): | |||||||
|                 base._subclasses += (_cls,) |                 base._subclasses += (_cls,) | ||||||
|             base._types = base._subclasses  # TODO depreciate _types |             base._types = base._subclasses  # TODO depreciate _types | ||||||
|  |  | ||||||
|         (Document, EmbeddedDocument, DictField, |         ( | ||||||
|          CachedReferenceField) = mcs._import_classes() |             Document, | ||||||
|  |             EmbeddedDocument, | ||||||
|  |             DictField, | ||||||
|  |             CachedReferenceField, | ||||||
|  |         ) = mcs._import_classes() | ||||||
|  |  | ||||||
|         if issubclass(new_class, Document): |         if issubclass(new_class, Document): | ||||||
|             new_class._collection = None |             new_class._collection = None | ||||||
| @@ -157,63 +168,50 @@ class DocumentMetaclass(type): | |||||||
|         # Add class to the _document_registry |         # Add class to the _document_registry | ||||||
|         _document_registry[new_class._class_name] = new_class |         _document_registry[new_class._class_name] = new_class | ||||||
|  |  | ||||||
|         # In Python 2, User-defined methods objects have special read-only |  | ||||||
|         # attributes 'im_func' and 'im_self' which contain the function obj |  | ||||||
|         # and class instance object respectively.  With Python 3 these special |  | ||||||
|         # attributes have been replaced by __func__ and __self__.  The Blinker |  | ||||||
|         # module continues to use im_func and im_self, so the code below |  | ||||||
|         # copies __func__ into im_func and __self__ into im_self for |  | ||||||
|         # classmethod objects in Document derived classes. |  | ||||||
|         if six.PY3: |  | ||||||
|             for val in new_class.__dict__.values(): |  | ||||||
|                 if isinstance(val, classmethod): |  | ||||||
|                     f = val.__get__(new_class) |  | ||||||
|                     if hasattr(f, '__func__') and not hasattr(f, 'im_func'): |  | ||||||
|                         f.__dict__.update({'im_func': getattr(f, '__func__')}) |  | ||||||
|                     if hasattr(f, '__self__') and not hasattr(f, 'im_self'): |  | ||||||
|                         f.__dict__.update({'im_self': getattr(f, '__self__')}) |  | ||||||
|  |  | ||||||
|         # Handle delete rules |         # Handle delete rules | ||||||
|         for field in itervalues(new_class._fields): |         for field in new_class._fields.values(): | ||||||
|             f = field |             f = field | ||||||
|             if f.owner_document is None: |             if f.owner_document is None: | ||||||
|                 f.owner_document = new_class |                 f.owner_document = new_class | ||||||
|             delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING) |             delete_rule = getattr(f, "reverse_delete_rule", DO_NOTHING) | ||||||
|             if isinstance(f, CachedReferenceField): |             if isinstance(f, CachedReferenceField): | ||||||
|  |  | ||||||
|                 if issubclass(new_class, EmbeddedDocument): |                 if issubclass(new_class, EmbeddedDocument): | ||||||
|                     raise InvalidDocumentError('CachedReferenceFields is not ' |                     raise InvalidDocumentError( | ||||||
|                                                'allowed in EmbeddedDocuments') |                         "CachedReferenceFields is not allowed in EmbeddedDocuments" | ||||||
|  |                     ) | ||||||
|  |  | ||||||
|                 if f.auto_sync: |                 if f.auto_sync: | ||||||
|                     f.start_listener() |                     f.start_listener() | ||||||
|  |  | ||||||
|                 f.document_type._cached_reference_fields.append(f) |                 f.document_type._cached_reference_fields.append(f) | ||||||
|  |  | ||||||
|             if isinstance(f, ComplexBaseField) and hasattr(f, 'field'): |             if isinstance(f, ComplexBaseField) and hasattr(f, "field"): | ||||||
|                 delete_rule = getattr(f.field, |                 delete_rule = getattr(f.field, "reverse_delete_rule", DO_NOTHING) | ||||||
|                                       'reverse_delete_rule', |  | ||||||
|                                       DO_NOTHING) |  | ||||||
|                 if isinstance(f, DictField) and delete_rule != DO_NOTHING: |                 if isinstance(f, DictField) and delete_rule != DO_NOTHING: | ||||||
|                     msg = ('Reverse delete rules are not supported ' |                     msg = ( | ||||||
|                            'for %s (field: %s)' % |                         "Reverse delete rules are not supported " | ||||||
|                            (field.__class__.__name__, field.name)) |                         "for %s (field: %s)" % (field.__class__.__name__, field.name) | ||||||
|  |                     ) | ||||||
|                     raise InvalidDocumentError(msg) |                     raise InvalidDocumentError(msg) | ||||||
|  |  | ||||||
|                 f = field.field |                 f = field.field | ||||||
|  |  | ||||||
|             if delete_rule != DO_NOTHING: |             if delete_rule != DO_NOTHING: | ||||||
|                 if issubclass(new_class, EmbeddedDocument): |                 if issubclass(new_class, EmbeddedDocument): | ||||||
|                     msg = ('Reverse delete rules are not supported for ' |                     msg = ( | ||||||
|                            'EmbeddedDocuments (field: %s)' % field.name) |                         "Reverse delete rules are not supported for " | ||||||
|  |                         "EmbeddedDocuments (field: %s)" % field.name | ||||||
|  |                     ) | ||||||
|                     raise InvalidDocumentError(msg) |                     raise InvalidDocumentError(msg) | ||||||
|                 f.document_type.register_delete_rule(new_class, |                 f.document_type.register_delete_rule(new_class, field.name, delete_rule) | ||||||
|                                                      field.name, delete_rule) |  | ||||||
|  |  | ||||||
|             if (field.name and hasattr(Document, field.name) and |             if ( | ||||||
|                     EmbeddedDocument not in new_class.mro()): |                 field.name | ||||||
|                 msg = ('%s is a document method and not a valid ' |                 and hasattr(Document, field.name) | ||||||
|                        'field name' % field.name) |                 and EmbeddedDocument not in new_class.mro() | ||||||
|  |             ): | ||||||
|  |                 msg = "%s is a document method and not a valid field name" % field.name | ||||||
|                 raise InvalidDocumentError(msg) |                 raise InvalidDocumentError(msg) | ||||||
|  |  | ||||||
|         return new_class |         return new_class | ||||||
| @@ -233,15 +231,14 @@ class DocumentMetaclass(type): | |||||||
|             if base is object: |             if base is object: | ||||||
|                 continue |                 continue | ||||||
|             yield base |             yield base | ||||||
|             for child_base in mcs.__get_bases(base.__bases__): |             yield from mcs.__get_bases(base.__bases__) | ||||||
|                 yield child_base |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _import_classes(mcs): |     def _import_classes(mcs): | ||||||
|         Document = _import_class('Document') |         Document = _import_class("Document") | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||||
|         DictField = _import_class('DictField') |         DictField = _import_class("DictField") | ||||||
|         CachedReferenceField = _import_class('CachedReferenceField') |         CachedReferenceField = _import_class("CachedReferenceField") | ||||||
|         return Document, EmbeddedDocument, DictField, CachedReferenceField |         return Document, EmbeddedDocument, DictField, CachedReferenceField | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -252,68 +249,69 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|  |  | ||||||
|     def __new__(mcs, name, bases, attrs): |     def __new__(mcs, name, bases, attrs): | ||||||
|         flattened_bases = mcs._get_bases(bases) |         flattened_bases = mcs._get_bases(bases) | ||||||
|         super_new = super(TopLevelDocumentMetaclass, mcs).__new__ |         super_new = super().__new__ | ||||||
|  |  | ||||||
|         # Set default _meta data if base class, otherwise get user defined meta |         # Set default _meta data if base class, otherwise get user defined meta | ||||||
|         if attrs.get('my_metaclass') == TopLevelDocumentMetaclass: |         if attrs.get("my_metaclass") == TopLevelDocumentMetaclass: | ||||||
|             # defaults |             # defaults | ||||||
|             attrs['_meta'] = { |             attrs["_meta"] = { | ||||||
|                 'abstract': True, |                 "abstract": True, | ||||||
|                 'max_documents': None, |                 "max_documents": None, | ||||||
|                 'max_size': None, |                 "max_size": None, | ||||||
|                 'ordering': [],  # default ordering applied at runtime |                 "ordering": [],  # default ordering applied at runtime | ||||||
|                 'indexes': [],  # indexes to be ensured at runtime |                 "indexes": [],  # indexes to be ensured at runtime | ||||||
|                 'id_field': None, |                 "id_field": None, | ||||||
|                 'index_background': False, |                 "index_background": False, | ||||||
|                 'index_drop_dups': False, |                 "index_opts": None, | ||||||
|                 'index_opts': None, |                 "delete_rules": None, | ||||||
|                 'delete_rules': None, |  | ||||||
|  |  | ||||||
|                 # allow_inheritance can be True, False, and None. True means |                 # allow_inheritance can be True, False, and None. True means | ||||||
|                 # "allow inheritance", False means "don't allow inheritance", |                 # "allow inheritance", False means "don't allow inheritance", | ||||||
|                 # None means "do whatever your parent does, or don't allow |                 # None means "do whatever your parent does, or don't allow | ||||||
|                 # inheritance if you're a top-level class". |                 # inheritance if you're a top-level class". | ||||||
|                 'allow_inheritance': None, |                 "allow_inheritance": None, | ||||||
|             } |             } | ||||||
|             attrs['_is_base_cls'] = True |             attrs["_is_base_cls"] = True | ||||||
|             attrs['_meta'].update(attrs.get('meta', {})) |             attrs["_meta"].update(attrs.get("meta", {})) | ||||||
|         else: |         else: | ||||||
|             attrs['_meta'] = attrs.get('meta', {}) |             attrs["_meta"] = attrs.get("meta", {}) | ||||||
|             # Explicitly set abstract to false unless set |             # Explicitly set abstract to false unless set | ||||||
|             attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False) |             attrs["_meta"]["abstract"] = attrs["_meta"].get("abstract", False) | ||||||
|             attrs['_is_base_cls'] = False |             attrs["_is_base_cls"] = False | ||||||
|  |  | ||||||
|         # Set flag marking as document class - as opposed to an object mixin |         # Set flag marking as document class - as opposed to an object mixin | ||||||
|         attrs['_is_document'] = True |         attrs["_is_document"] = True | ||||||
|  |  | ||||||
|         # Ensure queryset_class is inherited |         # Ensure queryset_class is inherited | ||||||
|         if 'objects' in attrs: |         if "objects" in attrs: | ||||||
|             manager = attrs['objects'] |             manager = attrs["objects"] | ||||||
|             if hasattr(manager, 'queryset_class'): |             if hasattr(manager, "queryset_class"): | ||||||
|                 attrs['_meta']['queryset_class'] = manager.queryset_class |                 attrs["_meta"]["queryset_class"] = manager.queryset_class | ||||||
|  |  | ||||||
|         # Clean up top level meta |         # Clean up top level meta | ||||||
|         if 'meta' in attrs: |         if "meta" in attrs: | ||||||
|             del attrs['meta'] |             del attrs["meta"] | ||||||
|  |  | ||||||
|         # Find the parent document class |         # Find the parent document class | ||||||
|         parent_doc_cls = [b for b in flattened_bases |         parent_doc_cls = [ | ||||||
|                           if b.__class__ == TopLevelDocumentMetaclass] |             b for b in flattened_bases if b.__class__ == TopLevelDocumentMetaclass | ||||||
|  |         ] | ||||||
|         parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0] |         parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0] | ||||||
|  |  | ||||||
|         # Prevent classes setting collection different to their parents |         # Prevent classes setting collection different to their parents | ||||||
|         # If parent wasn't an abstract class |         # If parent wasn't an abstract class | ||||||
|         if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and |         if ( | ||||||
|                 not parent_doc_cls._meta.get('abstract', True)): |             parent_doc_cls | ||||||
|             msg = 'Trying to set a collection on a subclass (%s)' % name |             and "collection" in attrs.get("_meta", {}) | ||||||
|  |             and not parent_doc_cls._meta.get("abstract", True) | ||||||
|  |         ): | ||||||
|  |             msg = "Trying to set a collection on a subclass (%s)" % name | ||||||
|             warnings.warn(msg, SyntaxWarning) |             warnings.warn(msg, SyntaxWarning) | ||||||
|             del attrs['_meta']['collection'] |             del attrs["_meta"]["collection"] | ||||||
|  |  | ||||||
|         # Ensure abstract documents have abstract bases |         # Ensure abstract documents have abstract bases | ||||||
|         if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'): |         if attrs.get("_is_base_cls") or attrs["_meta"].get("abstract"): | ||||||
|             if (parent_doc_cls and |             if parent_doc_cls and not parent_doc_cls._meta.get("abstract", False): | ||||||
|                     not parent_doc_cls._meta.get('abstract', False)): |                 msg = "Abstract document cannot have non-abstract base" | ||||||
|                 msg = 'Abstract document cannot have non-abstract base' |  | ||||||
|                 raise ValueError(msg) |                 raise ValueError(msg) | ||||||
|             return super_new(mcs, name, bases, attrs) |             return super_new(mcs, name, bases, attrs) | ||||||
|  |  | ||||||
| @@ -322,38 +320,43 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|         meta = MetaDict() |         meta = MetaDict() | ||||||
|         for base in flattened_bases[::-1]: |         for base in flattened_bases[::-1]: | ||||||
|             # Add any mixin metadata from plain objects |             # Add any mixin metadata from plain objects | ||||||
|             if hasattr(base, 'meta'): |             if hasattr(base, "meta"): | ||||||
|                 meta.merge(base.meta) |                 meta.merge(base.meta) | ||||||
|             elif hasattr(base, '_meta'): |             elif hasattr(base, "_meta"): | ||||||
|                 meta.merge(base._meta) |                 meta.merge(base._meta) | ||||||
|  |  | ||||||
|             # Set collection in the meta if its callable |             # Set collection in the meta if its callable | ||||||
|             if (getattr(base, '_is_document', False) and |             if getattr(base, "_is_document", False) and not base._meta.get("abstract"): | ||||||
|                     not base._meta.get('abstract')): |                 collection = meta.get("collection", None) | ||||||
|                 collection = meta.get('collection', None) |  | ||||||
|                 if callable(collection): |                 if callable(collection): | ||||||
|                     meta['collection'] = collection(base) |                     meta["collection"] = collection(base) | ||||||
|  |  | ||||||
|         meta.merge(attrs.get('_meta', {}))  # Top level meta |         meta.merge(attrs.get("_meta", {}))  # Top level meta | ||||||
|  |  | ||||||
|         # Only simple classes (i.e. direct subclasses of Document) may set |         # Only simple classes (i.e. direct subclasses of Document) may set | ||||||
|         # allow_inheritance to False. If the base Document allows inheritance, |         # allow_inheritance to False. If the base Document allows inheritance, | ||||||
|         # none of its subclasses can override allow_inheritance to False. |         # none of its subclasses can override allow_inheritance to False. | ||||||
|         simple_class = all([b._meta.get('abstract') |         simple_class = all( | ||||||
|                             for b in flattened_bases if hasattr(b, '_meta')]) |             [b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta")] | ||||||
|  |         ) | ||||||
|         if ( |         if ( | ||||||
|             not simple_class and |             not simple_class | ||||||
|             meta['allow_inheritance'] is False and |             and meta["allow_inheritance"] is False | ||||||
|             not meta['abstract'] |             and not meta["abstract"] | ||||||
|         ): |         ): | ||||||
|             raise ValueError('Only direct subclasses of Document may set ' |             raise ValueError( | ||||||
|                              '"allow_inheritance" to False') |                 "Only direct subclasses of Document may set " | ||||||
|  |                 '"allow_inheritance" to False' | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         # Set default collection name |         # Set default collection name | ||||||
|         if 'collection' not in meta: |         if "collection" not in meta: | ||||||
|             meta['collection'] = ''.join('_%s' % c if c.isupper() else c |             meta["collection"] = ( | ||||||
|                                          for c in name).strip('_').lower() |                 "".join("_%s" % c if c.isupper() else c for c in name) | ||||||
|         attrs['_meta'] = meta |                 .strip("_") | ||||||
|  |                 .lower() | ||||||
|  |             ) | ||||||
|  |         attrs["_meta"] = meta | ||||||
|  |  | ||||||
|         # Call super and get the new class |         # Call super and get the new class | ||||||
|         new_class = super_new(mcs, name, bases, attrs) |         new_class = super_new(mcs, name, bases, attrs) | ||||||
| @@ -361,82 +364,96 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|         meta = new_class._meta |         meta = new_class._meta | ||||||
|  |  | ||||||
|         # Set index specifications |         # Set index specifications | ||||||
|         meta['index_specs'] = new_class._build_index_specs(meta['indexes']) |         meta["index_specs"] = new_class._build_index_specs(meta["indexes"]) | ||||||
|  |  | ||||||
|         # If collection is a callable - call it and set the value |         # If collection is a callable - call it and set the value | ||||||
|         collection = meta.get('collection') |         collection = meta.get("collection") | ||||||
|         if callable(collection): |         if callable(collection): | ||||||
|             new_class._meta['collection'] = collection(new_class) |             new_class._meta["collection"] = collection(new_class) | ||||||
|  |  | ||||||
|         # Provide a default queryset unless exists or one has been set |         # Provide a default queryset unless exists or one has been set | ||||||
|         if 'objects' not in dir(new_class): |         if "objects" not in dir(new_class): | ||||||
|             new_class.objects = QuerySetManager() |             new_class.objects = QuerySetManager() | ||||||
|  |  | ||||||
|         # Validate the fields and set primary key if needed |         # Validate the fields and set primary key if needed | ||||||
|         for field_name, field in iteritems(new_class._fields): |         for field_name, field in new_class._fields.items(): | ||||||
|             if field.primary_key: |             if field.primary_key: | ||||||
|                 # Ensure only one primary key is set |                 # Ensure only one primary key is set | ||||||
|                 current_pk = new_class._meta.get('id_field') |                 current_pk = new_class._meta.get("id_field") | ||||||
|                 if current_pk and current_pk != field_name: |                 if current_pk and current_pk != field_name: | ||||||
|                     raise ValueError('Cannot override primary key field') |                     raise ValueError("Cannot override primary key field") | ||||||
|  |  | ||||||
|                 # Set primary key |                 # Set primary key | ||||||
|                 if not current_pk: |                 if not current_pk: | ||||||
|                     new_class._meta['id_field'] = field_name |                     new_class._meta["id_field"] = field_name | ||||||
|                     new_class.id = field |                     new_class.id = field | ||||||
|  |  | ||||||
|         # Set primary key if not defined by the document |         # If the document doesn't explicitly define a primary key field, create | ||||||
|         new_class._auto_id_field = getattr(parent_doc_cls, |         # one. Make it an ObjectIdField and give it a non-clashing name ("id" | ||||||
|                                            '_auto_id_field', False) |         # by default, but can be different if that one's taken). | ||||||
|         if not new_class._meta.get('id_field'): |         if not new_class._meta.get("id_field"): | ||||||
|             # After 0.10, find not existing names, instead of overwriting |  | ||||||
|             id_name, id_db_name = mcs.get_auto_id_names(new_class) |             id_name, id_db_name = mcs.get_auto_id_names(new_class) | ||||||
|             new_class._auto_id_field = True |             new_class._meta["id_field"] = id_name | ||||||
|             new_class._meta['id_field'] = id_name |  | ||||||
|             new_class._fields[id_name] = ObjectIdField(db_field=id_db_name) |             new_class._fields[id_name] = ObjectIdField(db_field=id_db_name) | ||||||
|             new_class._fields[id_name].name = id_name |             new_class._fields[id_name].name = id_name | ||||||
|             new_class.id = new_class._fields[id_name] |             new_class.id = new_class._fields[id_name] | ||||||
|             new_class._db_field_map[id_name] = id_db_name |             new_class._db_field_map[id_name] = id_db_name | ||||||
|             new_class._reverse_db_field_map[id_db_name] = id_name |             new_class._reverse_db_field_map[id_db_name] = id_name | ||||||
|             # Prepend id field to _fields_ordered |  | ||||||
|             new_class._fields_ordered = (id_name, ) + new_class._fields_ordered |  | ||||||
|  |  | ||||||
|         # Merge in exceptions with parent hierarchy |             # Prepend the ID field to _fields_ordered (so that it's *always* | ||||||
|  |             # the first field). | ||||||
|  |             new_class._fields_ordered = (id_name,) + new_class._fields_ordered | ||||||
|  |  | ||||||
|  |         # Merge in exceptions with parent hierarchy. | ||||||
|         exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) |         exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) | ||||||
|         module = attrs.get('__module__') |         module = attrs.get("__module__") | ||||||
|         for exc in exceptions_to_merge: |         for exc in exceptions_to_merge: | ||||||
|             name = exc.__name__ |             name = exc.__name__ | ||||||
|             parents = tuple(getattr(base, name) for base in flattened_bases |             parents = tuple( | ||||||
|                             if hasattr(base, name)) or (exc,) |                 getattr(base, name) for base in flattened_bases if hasattr(base, name) | ||||||
|             # Create new exception and set to new_class |             ) or (exc,) | ||||||
|             exception = type(name, parents, {'__module__': module}) |  | ||||||
|  |             # Create a new exception and set it as an attribute on the new | ||||||
|  |             # class. | ||||||
|  |             exception = type(name, parents, {"__module__": module}) | ||||||
|             setattr(new_class, name, exception) |             setattr(new_class, name, exception) | ||||||
|  |  | ||||||
|         return new_class |         return new_class | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def get_auto_id_names(mcs, new_class): |     def get_auto_id_names(mcs, new_class): | ||||||
|         id_name, id_db_name = ('id', '_id') |         """Find a name for the automatic ID field for the given new class. | ||||||
|         if id_name not in new_class._fields and \ |  | ||||||
|                 id_db_name not in (v.db_field for v in new_class._fields.values()): |         Return a two-element tuple where the first item is the field name (i.e. | ||||||
|  |         the attribute name on the object) and the second element is the DB | ||||||
|  |         field name (i.e. the name of the key stored in MongoDB). | ||||||
|  |  | ||||||
|  |         Defaults to ('id', '_id'), or generates a non-clashing name in the form | ||||||
|  |         of ('auto_id_X', '_auto_id_X') if the default name is already taken. | ||||||
|  |         """ | ||||||
|  |         id_name, id_db_name = ("id", "_id") | ||||||
|  |         existing_fields = {field_name for field_name in new_class._fields} | ||||||
|  |         existing_db_fields = {v.db_field for v in new_class._fields.values()} | ||||||
|  |         if id_name not in existing_fields and id_db_name not in existing_db_fields: | ||||||
|             return id_name, id_db_name |             return id_name, id_db_name | ||||||
|         id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0 |  | ||||||
|         while id_name in new_class._fields or \ |         id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0) | ||||||
|                 id_db_name in (v.db_field for v in new_class._fields.values()): |         for i in itertools.count(): | ||||||
|             id_name = '{0}_{1}'.format(id_basename, i) |             id_name = "{}_{}".format(id_basename, i) | ||||||
|             id_db_name = '{0}_{1}'.format(id_db_basename, i) |             id_db_name = "{}_{}".format(id_db_basename, i) | ||||||
|             i += 1 |             if id_name not in existing_fields and id_db_name not in existing_db_fields: | ||||||
|         return id_name, id_db_name |                 return id_name, id_db_name | ||||||
|  |  | ||||||
|  |  | ||||||
| class MetaDict(dict): | class MetaDict(dict): | ||||||
|     """Custom dictionary for meta classes. |     """Custom dictionary for meta classes. | ||||||
|     Handles the merging of set indexes |     Handles the merging of set indexes | ||||||
|     """ |     """ | ||||||
|     _merge_options = ('indexes',) |  | ||||||
|  |     _merge_options = ("indexes",) | ||||||
|  |  | ||||||
|     def merge(self, new_options): |     def merge(self, new_options): | ||||||
|         for k, v in iteritems(new_options): |         for k, v in new_options.items(): | ||||||
|             if k in self._merge_options: |             if k in self._merge_options: | ||||||
|                 self[k] = self.get(k, []) + v |                 self[k] = self.get(k, []) + v | ||||||
|             else: |             else: | ||||||
| @@ -445,4 +462,5 @@ class MetaDict(dict): | |||||||
|  |  | ||||||
| class BasesTuple(tuple): | class BasesTuple(tuple): | ||||||
|     """Special class to handle introspection of bases tuple in __new__""" |     """Special class to handle introspection of bases tuple in __new__""" | ||||||
|  |  | ||||||
|     pass |     pass | ||||||
|   | |||||||
| @@ -1,7 +1,7 @@ | |||||||
| import re | import re | ||||||
|  |  | ||||||
|  |  | ||||||
| class LazyRegexCompiler(object): | class LazyRegexCompiler: | ||||||
|     """Descriptor to allow lazy compilation of regex""" |     """Descriptor to allow lazy compilation of regex""" | ||||||
|  |  | ||||||
|     def __init__(self, pattern, flags=0): |     def __init__(self, pattern, flags=0): | ||||||
|   | |||||||
| @@ -19,34 +19,44 @@ def _import_class(cls_name): | |||||||
|     if cls_name in _class_registry_cache: |     if cls_name in _class_registry_cache: | ||||||
|         return _class_registry_cache.get(cls_name) |         return _class_registry_cache.get(cls_name) | ||||||
|  |  | ||||||
|     doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument', |     doc_classes = ( | ||||||
|                    'MapReduceDocument') |         "Document", | ||||||
|  |         "DynamicEmbeddedDocument", | ||||||
|  |         "EmbeddedDocument", | ||||||
|  |         "MapReduceDocument", | ||||||
|  |     ) | ||||||
|  |  | ||||||
|     # Field Classes |     # Field Classes | ||||||
|     if not _field_list_cache: |     if not _field_list_cache: | ||||||
|         from mongoengine.fields import __all__ as fields |         from mongoengine.fields import __all__ as fields | ||||||
|  |  | ||||||
|         _field_list_cache.extend(fields) |         _field_list_cache.extend(fields) | ||||||
|         from mongoengine.base.fields import __all__ as fields |         from mongoengine.base.fields import __all__ as fields | ||||||
|  |  | ||||||
|         _field_list_cache.extend(fields) |         _field_list_cache.extend(fields) | ||||||
|  |  | ||||||
|     field_classes = _field_list_cache |     field_classes = _field_list_cache | ||||||
|  |  | ||||||
|     deref_classes = ('DeReference',) |     deref_classes = ("DeReference",) | ||||||
|  |  | ||||||
|     if cls_name == 'BaseDocument': |     if cls_name == "BaseDocument": | ||||||
|         from mongoengine.base import document as module |         from mongoengine.base import document as module | ||||||
|         import_classes = ['BaseDocument'] |  | ||||||
|  |         import_classes = ["BaseDocument"] | ||||||
|     elif cls_name in doc_classes: |     elif cls_name in doc_classes: | ||||||
|         from mongoengine import document as module |         from mongoengine import document as module | ||||||
|  |  | ||||||
|         import_classes = doc_classes |         import_classes = doc_classes | ||||||
|     elif cls_name in field_classes: |     elif cls_name in field_classes: | ||||||
|         from mongoengine import fields as module |         from mongoengine import fields as module | ||||||
|  |  | ||||||
|         import_classes = field_classes |         import_classes = field_classes | ||||||
|     elif cls_name in deref_classes: |     elif cls_name in deref_classes: | ||||||
|         from mongoengine import dereference as module |         from mongoengine import dereference as module | ||||||
|  |  | ||||||
|         import_classes = deref_classes |         import_classes = deref_classes | ||||||
|     else: |     else: | ||||||
|         raise ValueError('No import set for: %s' % cls_name) |         raise ValueError("No import set for: %s" % cls_name) | ||||||
|  |  | ||||||
|     for cls in import_classes: |     for cls in import_classes: | ||||||
|         _class_registry_cache[cls] = getattr(module, cls) |         _class_registry_cache[cls] = getattr(module, cls) | ||||||
|   | |||||||
| @@ -1,15 +1,22 @@ | |||||||
| from pymongo import MongoClient, ReadPreference, uri_parser | from pymongo import MongoClient, ReadPreference, uri_parser | ||||||
| from pymongo.database import _check_name | from pymongo.database import _check_name | ||||||
| import six |  | ||||||
|  |  | ||||||
| __all__ = ['MongoEngineConnectionError', 'connect', 'disconnect', 'disconnect_all', | __all__ = [ | ||||||
|            'register_connection', 'DEFAULT_CONNECTION_NAME', 'DEFAULT_DATABASE_NAME', |     "DEFAULT_CONNECTION_NAME", | ||||||
|            'get_db', 'get_connection'] |     "DEFAULT_DATABASE_NAME", | ||||||
|  |     "ConnectionFailure", | ||||||
|  |     "connect", | ||||||
|  |     "disconnect", | ||||||
|  |     "disconnect_all", | ||||||
|  |     "get_connection", | ||||||
|  |     "get_db", | ||||||
|  |     "register_connection", | ||||||
|  | ] | ||||||
|  |  | ||||||
|  |  | ||||||
| DEFAULT_CONNECTION_NAME = 'default' | DEFAULT_CONNECTION_NAME = "default" | ||||||
| DEFAULT_DATABASE_NAME = 'test' | DEFAULT_DATABASE_NAME = "test" | ||||||
| DEFAULT_HOST = 'localhost' | DEFAULT_HOST = "localhost" | ||||||
| DEFAULT_PORT = 27017 | DEFAULT_PORT = 27017 | ||||||
|  |  | ||||||
| _connection_settings = {} | _connection_settings = {} | ||||||
| @@ -19,10 +26,11 @@ _dbs = {} | |||||||
| READ_PREFERENCE = ReadPreference.PRIMARY | READ_PREFERENCE = ReadPreference.PRIMARY | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoEngineConnectionError(Exception): | class ConnectionFailure(Exception): | ||||||
|     """Error raised when the database connection can't be established or |     """Error raised when the database connection can't be established or | ||||||
|     when a connection with a requested alias can't be retrieved. |     when a connection with a requested alias can't be retrieved. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -30,19 +38,24 @@ def _check_db_name(name): | |||||||
|     """Check if a database name is valid. |     """Check if a database name is valid. | ||||||
|     This functionality is copied from pymongo Database class constructor. |     This functionality is copied from pymongo Database class constructor. | ||||||
|     """ |     """ | ||||||
|     if not isinstance(name, six.string_types): |     if not isinstance(name, str): | ||||||
|         raise TypeError('name must be an instance of %s' % six.string_types) |         raise TypeError("name must be an instance of %s" % str) | ||||||
|     elif name != '$external': |     elif name != "$external": | ||||||
|         _check_name(name) |         _check_name(name) | ||||||
|  |  | ||||||
|  |  | ||||||
| def _get_connection_settings( | def _get_connection_settings( | ||||||
|         db=None, name=None, host=None, port=None, |     db=None, | ||||||
|         read_preference=READ_PREFERENCE, |     name=None, | ||||||
|         username=None, password=None, |     host=None, | ||||||
|         authentication_source=None, |     port=None, | ||||||
|         authentication_mechanism=None, |     read_preference=READ_PREFERENCE, | ||||||
|         **kwargs): |     username=None, | ||||||
|  |     password=None, | ||||||
|  |     authentication_source=None, | ||||||
|  |     authentication_mechanism=None, | ||||||
|  |     **kwargs | ||||||
|  | ): | ||||||
|     """Get the connection settings as a dict |     """Get the connection settings as a dict | ||||||
|  |  | ||||||
|     : param db: the name of the database to use, for compatibility with connect |     : param db: the name of the database to use, for compatibility with connect | ||||||
| @@ -65,53 +78,61 @@ def _get_connection_settings( | |||||||
|     .. versionchanged:: 0.10.6 - added mongomock support |     .. versionchanged:: 0.10.6 - added mongomock support | ||||||
|     """ |     """ | ||||||
|     conn_settings = { |     conn_settings = { | ||||||
|         'name': name or db or DEFAULT_DATABASE_NAME, |         "name": name or db or DEFAULT_DATABASE_NAME, | ||||||
|         'host': host or DEFAULT_HOST, |         "host": host or DEFAULT_HOST, | ||||||
|         'port': port or DEFAULT_PORT, |         "port": port or DEFAULT_PORT, | ||||||
|         'read_preference': read_preference, |         "read_preference": read_preference, | ||||||
|         'username': username, |         "username": username, | ||||||
|         'password': password, |         "password": password, | ||||||
|         'authentication_source': authentication_source, |         "authentication_source": authentication_source, | ||||||
|         'authentication_mechanism': authentication_mechanism |         "authentication_mechanism": authentication_mechanism, | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     _check_db_name(conn_settings['name']) |     _check_db_name(conn_settings["name"]) | ||||||
|     conn_host = conn_settings['host'] |     conn_host = conn_settings["host"] | ||||||
|  |  | ||||||
|     # Host can be a list or a string, so if string, force to a list. |     # Host can be a list or a string, so if string, force to a list. | ||||||
|     if isinstance(conn_host, six.string_types): |     if isinstance(conn_host, str): | ||||||
|         conn_host = [conn_host] |         conn_host = [conn_host] | ||||||
|  |  | ||||||
|     resolved_hosts = [] |     resolved_hosts = [] | ||||||
|     for entity in conn_host: |     for entity in conn_host: | ||||||
|  |  | ||||||
|         # Handle Mongomock |         # Handle Mongomock | ||||||
|         if entity.startswith('mongomock://'): |         if entity.startswith("mongomock://"): | ||||||
|             conn_settings['is_mock'] = True |             conn_settings["is_mock"] = True | ||||||
|             # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://` |             # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://` | ||||||
|             resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1)) |             new_entity = entity.replace("mongomock://", "mongodb://", 1) | ||||||
|  |             resolved_hosts.append(new_entity) | ||||||
|  |  | ||||||
|  |             uri_dict = uri_parser.parse_uri(new_entity) | ||||||
|  |  | ||||||
|  |             database = uri_dict.get("database") | ||||||
|  |             if database: | ||||||
|  |                 conn_settings["name"] = database | ||||||
|  |  | ||||||
|         # Handle URI style connections, only updating connection params which |         # Handle URI style connections, only updating connection params which | ||||||
|         # were explicitly specified in the URI. |         # were explicitly specified in the URI. | ||||||
|         elif '://' in entity: |         elif "://" in entity: | ||||||
|             uri_dict = uri_parser.parse_uri(entity) |             uri_dict = uri_parser.parse_uri(entity) | ||||||
|             resolved_hosts.append(entity) |             resolved_hosts.append(entity) | ||||||
|  |  | ||||||
|             if uri_dict.get('database'): |             database = uri_dict.get("database") | ||||||
|                 conn_settings['name'] = uri_dict.get('database') |             if database: | ||||||
|  |                 conn_settings["name"] = database | ||||||
|  |  | ||||||
|             for param in ('read_preference', 'username', 'password'): |             for param in ("read_preference", "username", "password"): | ||||||
|                 if uri_dict.get(param): |                 if uri_dict.get(param): | ||||||
|                     conn_settings[param] = uri_dict[param] |                     conn_settings[param] = uri_dict[param] | ||||||
|  |  | ||||||
|             uri_options = uri_dict['options'] |             uri_options = uri_dict["options"] | ||||||
|             if 'replicaset' in uri_options: |             if "replicaset" in uri_options: | ||||||
|                 conn_settings['replicaSet'] = uri_options['replicaset'] |                 conn_settings["replicaSet"] = uri_options["replicaset"] | ||||||
|             if 'authsource' in uri_options: |             if "authsource" in uri_options: | ||||||
|                 conn_settings['authentication_source'] = uri_options['authsource'] |                 conn_settings["authentication_source"] = uri_options["authsource"] | ||||||
|             if 'authmechanism' in uri_options: |             if "authmechanism" in uri_options: | ||||||
|                 conn_settings['authentication_mechanism'] = uri_options['authmechanism'] |                 conn_settings["authentication_mechanism"] = uri_options["authmechanism"] | ||||||
|             if 'readpreference' in uri_options: |             if "readpreference" in uri_options: | ||||||
|                 read_preferences = ( |                 read_preferences = ( | ||||||
|                     ReadPreference.NEAREST, |                     ReadPreference.NEAREST, | ||||||
|                     ReadPreference.PRIMARY, |                     ReadPreference.PRIMARY, | ||||||
| @@ -125,40 +146,47 @@ def _get_connection_settings( | |||||||
|                 # int (e.g. 3). |                 # int (e.g. 3). | ||||||
|                 # TODO simplify the code below once we drop support for |                 # TODO simplify the code below once we drop support for | ||||||
|                 # PyMongo v3.4. |                 # PyMongo v3.4. | ||||||
|                 read_pf_mode = uri_options['readpreference'] |                 read_pf_mode = uri_options["readpreference"] | ||||||
|                 if isinstance(read_pf_mode, six.string_types): |                 if isinstance(read_pf_mode, str): | ||||||
|                     read_pf_mode = read_pf_mode.lower() |                     read_pf_mode = read_pf_mode.lower() | ||||||
|                 for preference in read_preferences: |                 for preference in read_preferences: | ||||||
|                     if ( |                     if ( | ||||||
|                         preference.name.lower() == read_pf_mode or |                         preference.name.lower() == read_pf_mode | ||||||
|                         preference.mode == read_pf_mode |                         or preference.mode == read_pf_mode | ||||||
|                     ): |                     ): | ||||||
|                         conn_settings['read_preference'] = preference |                         conn_settings["read_preference"] = preference | ||||||
|                         break |                         break | ||||||
|         else: |         else: | ||||||
|             resolved_hosts.append(entity) |             resolved_hosts.append(entity) | ||||||
|     conn_settings['host'] = resolved_hosts |     conn_settings["host"] = resolved_hosts | ||||||
|  |  | ||||||
|     # Deprecated parameters that should not be passed on |     # Deprecated parameters that should not be passed on | ||||||
|     kwargs.pop('slaves', None) |     kwargs.pop("slaves", None) | ||||||
|     kwargs.pop('is_slave', None) |     kwargs.pop("is_slave", None) | ||||||
|  |  | ||||||
|     conn_settings.update(kwargs) |     conn_settings.update(kwargs) | ||||||
|     return conn_settings |     return conn_settings | ||||||
|  |  | ||||||
|  |  | ||||||
| def register_connection(alias, db=None, name=None, host=None, port=None, | def register_connection( | ||||||
|                         read_preference=READ_PREFERENCE, |     alias, | ||||||
|                         username=None, password=None, |     db=None, | ||||||
|                         authentication_source=None, |     name=None, | ||||||
|                         authentication_mechanism=None, |     host=None, | ||||||
|                         **kwargs): |     port=None, | ||||||
|  |     read_preference=READ_PREFERENCE, | ||||||
|  |     username=None, | ||||||
|  |     password=None, | ||||||
|  |     authentication_source=None, | ||||||
|  |     authentication_mechanism=None, | ||||||
|  |     **kwargs | ||||||
|  | ): | ||||||
|     """Register the connection settings. |     """Register the connection settings. | ||||||
|  |  | ||||||
|     : param alias: the name that will be used to refer to this connection |     : param alias: the name that will be used to refer to this connection | ||||||
|         throughout MongoEngine |         throughout MongoEngine | ||||||
|     : param name: the name of the specific database to use |  | ||||||
|     : param db: the name of the database to use, for compatibility with connect |     : param db: the name of the database to use, for compatibility with connect | ||||||
|  |     : param name: the name of the specific database to use | ||||||
|     : param host: the host name of the: program: `mongod` instance to connect to |     : param host: the host name of the: program: `mongod` instance to connect to | ||||||
|     : param port: the port that the: program: `mongod` instance is running on |     : param port: the port that the: program: `mongod` instance is running on | ||||||
|     : param read_preference: The read preference for the collection |     : param read_preference: The read preference for the collection | ||||||
| @@ -177,12 +205,17 @@ def register_connection(alias, db=None, name=None, host=None, port=None, | |||||||
|     .. versionchanged:: 0.10.6 - added mongomock support |     .. versionchanged:: 0.10.6 - added mongomock support | ||||||
|     """ |     """ | ||||||
|     conn_settings = _get_connection_settings( |     conn_settings = _get_connection_settings( | ||||||
|         db=db, name=name, host=host, port=port, |         db=db, | ||||||
|  |         name=name, | ||||||
|  |         host=host, | ||||||
|  |         port=port, | ||||||
|         read_preference=read_preference, |         read_preference=read_preference, | ||||||
|         username=username, password=password, |         username=username, | ||||||
|  |         password=password, | ||||||
|         authentication_source=authentication_source, |         authentication_source=authentication_source, | ||||||
|         authentication_mechanism=authentication_mechanism, |         authentication_mechanism=authentication_mechanism, | ||||||
|         **kwargs) |         **kwargs | ||||||
|  |     ) | ||||||
|     _connection_settings[alias] = conn_settings |     _connection_settings[alias] = conn_settings | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -198,7 +231,7 @@ def disconnect(alias=DEFAULT_CONNECTION_NAME): | |||||||
|     if alias in _dbs: |     if alias in _dbs: | ||||||
|         # Detach all cached collections in Documents |         # Detach all cached collections in Documents | ||||||
|         for doc_cls in _get_documents_by_db(alias, DEFAULT_CONNECTION_NAME): |         for doc_cls in _get_documents_by_db(alias, DEFAULT_CONNECTION_NAME): | ||||||
|             if issubclass(doc_cls, Document):     # Skip EmbeddedDocument |             if issubclass(doc_cls, Document):  # Skip EmbeddedDocument | ||||||
|                 doc_cls._disconnect() |                 doc_cls._disconnect() | ||||||
|  |  | ||||||
|         del _dbs[alias] |         del _dbs[alias] | ||||||
| @@ -226,22 +259,24 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | |||||||
|         return _connections[alias] |         return _connections[alias] | ||||||
|  |  | ||||||
|     # Validate that the requested alias exists in the _connection_settings. |     # Validate that the requested alias exists in the _connection_settings. | ||||||
|     # Raise MongoEngineConnectionError if it doesn't. |     # Raise ConnectionFailure if it doesn't. | ||||||
|     if alias not in _connection_settings: |     if alias not in _connection_settings: | ||||||
|         if alias == DEFAULT_CONNECTION_NAME: |         if alias == DEFAULT_CONNECTION_NAME: | ||||||
|             msg = 'You have not defined a default connection' |             msg = "You have not defined a default connection" | ||||||
|         else: |         else: | ||||||
|             msg = 'Connection with alias "%s" has not been defined' % alias |             msg = 'Connection with alias "%s" has not been defined' % alias | ||||||
|         raise MongoEngineConnectionError(msg) |         raise ConnectionFailure(msg) | ||||||
|  |  | ||||||
|     def _clean_settings(settings_dict): |     def _clean_settings(settings_dict): | ||||||
|         irrelevant_fields_set = { |         irrelevant_fields_set = { | ||||||
|             'name', 'username', 'password', |             "name", | ||||||
|             'authentication_source', 'authentication_mechanism' |             "username", | ||||||
|  |             "password", | ||||||
|  |             "authentication_source", | ||||||
|  |             "authentication_mechanism", | ||||||
|         } |         } | ||||||
|         return { |         return { | ||||||
|             k: v for k, v in settings_dict.items() |             k: v for k, v in settings_dict.items() if k not in irrelevant_fields_set | ||||||
|             if k not in irrelevant_fields_set |  | ||||||
|         } |         } | ||||||
|  |  | ||||||
|     raw_conn_settings = _connection_settings[alias].copy() |     raw_conn_settings = _connection_settings[alias].copy() | ||||||
| @@ -252,41 +287,37 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | |||||||
|     conn_settings = _clean_settings(raw_conn_settings) |     conn_settings = _clean_settings(raw_conn_settings) | ||||||
|  |  | ||||||
|     # Determine if we should use PyMongo's or mongomock's MongoClient. |     # Determine if we should use PyMongo's or mongomock's MongoClient. | ||||||
|     is_mock = conn_settings.pop('is_mock', False) |     is_mock = conn_settings.pop("is_mock", False) | ||||||
|     if is_mock: |     if is_mock: | ||||||
|         try: |         try: | ||||||
|             import mongomock |             import mongomock | ||||||
|         except ImportError: |         except ImportError: | ||||||
|             raise RuntimeError('You need mongomock installed to mock ' |             raise RuntimeError("You need mongomock installed to mock MongoEngine.") | ||||||
|                                'MongoEngine.') |  | ||||||
|         connection_class = mongomock.MongoClient |         connection_class = mongomock.MongoClient | ||||||
|     else: |     else: | ||||||
|         connection_class = MongoClient |         connection_class = MongoClient | ||||||
|  |  | ||||||
|     # Re-use existing connection if one is suitable |     # Re-use existing connection if one is suitable. | ||||||
|     existing_connection = _find_existing_connection(raw_conn_settings) |     existing_connection = _find_existing_connection(raw_conn_settings) | ||||||
|  |  | ||||||
|     # If an existing connection was found, assign it to the new alias |  | ||||||
|     if existing_connection: |     if existing_connection: | ||||||
|         _connections[alias] = existing_connection |         connection = existing_connection | ||||||
|     else: |     else: | ||||||
|         _connections[alias] = _create_connection(alias=alias, |         connection = _create_connection( | ||||||
|                                                  connection_class=connection_class, |             alias=alias, connection_class=connection_class, **conn_settings | ||||||
|                                                  **conn_settings) |         ) | ||||||
|  |     _connections[alias] = connection | ||||||
|     return _connections[alias] |     return _connections[alias] | ||||||
|  |  | ||||||
|  |  | ||||||
| def _create_connection(alias, connection_class, **connection_settings): | def _create_connection(alias, connection_class, **connection_settings): | ||||||
|     """ |     """ | ||||||
|     Create the new connection for this alias. Raise |     Create the new connection for this alias. Raise | ||||||
|     MongoEngineConnectionError if it can't be established. |     ConnectionFailure if it can't be established. | ||||||
|     """ |     """ | ||||||
|     try: |     try: | ||||||
|         return connection_class(**connection_settings) |         return connection_class(**connection_settings) | ||||||
|     except Exception as e: |     except Exception as e: | ||||||
|         raise MongoEngineConnectionError( |         raise ConnectionFailure("Cannot connect to database {} :\n{}".format(alias, e)) | ||||||
|             'Cannot connect to database %s :\n%s' % (alias, e)) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def _find_existing_connection(connection_settings): | def _find_existing_connection(connection_settings): | ||||||
| @@ -308,7 +339,7 @@ def _find_existing_connection(connection_settings): | |||||||
|         # Only remove the name but it's important to |         # Only remove the name but it's important to | ||||||
|         # keep the username/password/authentication_source/authentication_mechanism |         # keep the username/password/authentication_source/authentication_mechanism | ||||||
|         # to identify if the connection could be shared (cfr https://github.com/MongoEngine/mongoengine/issues/2047) |         # to identify if the connection could be shared (cfr https://github.com/MongoEngine/mongoengine/issues/2047) | ||||||
|         return {k: v for k, v in settings_dict.items() if k != 'name'} |         return {k: v for k, v in settings_dict.items() if k != "name"} | ||||||
|  |  | ||||||
|     cleaned_conn_settings = _clean_settings(connection_settings) |     cleaned_conn_settings = _clean_settings(connection_settings) | ||||||
|     for db_alias, connection_settings in connection_settings_bis: |     for db_alias, connection_settings in connection_settings_bis: | ||||||
| @@ -324,14 +355,18 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | |||||||
|     if alias not in _dbs: |     if alias not in _dbs: | ||||||
|         conn = get_connection(alias) |         conn = get_connection(alias) | ||||||
|         conn_settings = _connection_settings[alias] |         conn_settings = _connection_settings[alias] | ||||||
|         db = conn[conn_settings['name']] |         db = conn[conn_settings["name"]] | ||||||
|         auth_kwargs = {'source': conn_settings['authentication_source']} |         auth_kwargs = {"source": conn_settings["authentication_source"]} | ||||||
|         if conn_settings['authentication_mechanism'] is not None: |         if conn_settings["authentication_mechanism"] is not None: | ||||||
|             auth_kwargs['mechanism'] = conn_settings['authentication_mechanism'] |             auth_kwargs["mechanism"] = conn_settings["authentication_mechanism"] | ||||||
|         # Authenticate if necessary |         # Authenticate if necessary | ||||||
|         if conn_settings['username'] and (conn_settings['password'] or |         if conn_settings["username"] and ( | ||||||
|                                           conn_settings['authentication_mechanism'] == 'MONGODB-X509'): |             conn_settings["password"] | ||||||
|             db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs) |             or conn_settings["authentication_mechanism"] == "MONGODB-X509" | ||||||
|  |         ): | ||||||
|  |             db.authenticate( | ||||||
|  |                 conn_settings["username"], conn_settings["password"], **auth_kwargs | ||||||
|  |             ) | ||||||
|         _dbs[alias] = db |         _dbs[alias] = db | ||||||
|     return _dbs[alias] |     return _dbs[alias] | ||||||
|  |  | ||||||
| @@ -359,8 +394,11 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): | |||||||
|         new_conn_settings = _get_connection_settings(db, **kwargs) |         new_conn_settings = _get_connection_settings(db, **kwargs) | ||||||
|  |  | ||||||
|         if new_conn_settings != prev_conn_setting: |         if new_conn_settings != prev_conn_setting: | ||||||
|             raise MongoEngineConnectionError( |             err_msg = ( | ||||||
|                 'A different connection with alias `%s` was already registered. Use disconnect() first' % alias) |                 "A different connection with alias `{}` was already " | ||||||
|  |                 "registered. Use disconnect() first" | ||||||
|  |             ).format(alias) | ||||||
|  |             raise ConnectionFailure(err_msg) | ||||||
|     else: |     else: | ||||||
|         register_connection(alias, db, **kwargs) |         register_connection(alias, db, **kwargs) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,17 +1,24 @@ | |||||||
| from contextlib import contextmanager | from contextlib import contextmanager | ||||||
|  |  | ||||||
|  | from pymongo.read_concern import ReadConcern | ||||||
| from pymongo.write_concern import WriteConcern | from pymongo.write_concern import WriteConcern | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||||
| from mongoengine.pymongo_support import count_documents | from mongoengine.pymongo_support import count_documents | ||||||
|  |  | ||||||
| __all__ = ('switch_db', 'switch_collection', 'no_dereference', | __all__ = ( | ||||||
|            'no_sub_classes', 'query_counter', 'set_write_concern') |     "switch_db", | ||||||
|  |     "switch_collection", | ||||||
|  |     "no_dereference", | ||||||
|  |     "no_sub_classes", | ||||||
|  |     "query_counter", | ||||||
|  |     "set_write_concern", | ||||||
|  |     "set_read_write_concern", | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class switch_db(object): | class switch_db: | ||||||
|     """switch_db alias context manager. |     """switch_db alias context manager. | ||||||
|  |  | ||||||
|     Example :: |     Example :: | ||||||
| @@ -38,21 +45,21 @@ class switch_db(object): | |||||||
|         self.cls = cls |         self.cls = cls | ||||||
|         self.collection = cls._get_collection() |         self.collection = cls._get_collection() | ||||||
|         self.db_alias = db_alias |         self.db_alias = db_alias | ||||||
|         self.ori_db_alias = cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME) |         self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME) | ||||||
|  |  | ||||||
|     def __enter__(self): |     def __enter__(self): | ||||||
|         """Change the db_alias and clear the cached collection.""" |         """Change the db_alias and clear the cached collection.""" | ||||||
|         self.cls._meta['db_alias'] = self.db_alias |         self.cls._meta["db_alias"] = self.db_alias | ||||||
|         self.cls._collection = None |         self.cls._collection = None | ||||||
|         return self.cls |         return self.cls | ||||||
|  |  | ||||||
|     def __exit__(self, t, value, traceback): |     def __exit__(self, t, value, traceback): | ||||||
|         """Reset the db_alias and collection.""" |         """Reset the db_alias and collection.""" | ||||||
|         self.cls._meta['db_alias'] = self.ori_db_alias |         self.cls._meta["db_alias"] = self.ori_db_alias | ||||||
|         self.cls._collection = self.collection |         self.cls._collection = self.collection | ||||||
|  |  | ||||||
|  |  | ||||||
| class switch_collection(object): | class switch_collection: | ||||||
|     """switch_collection alias context manager. |     """switch_collection alias context manager. | ||||||
|  |  | ||||||
|     Example :: |     Example :: | ||||||
| @@ -94,7 +101,7 @@ class switch_collection(object): | |||||||
|         self.cls._get_collection_name = self.ori_get_collection_name |         self.cls._get_collection_name = self.ori_get_collection_name | ||||||
|  |  | ||||||
|  |  | ||||||
| class no_dereference(object): | class no_dereference: | ||||||
|     """no_dereference context manager. |     """no_dereference context manager. | ||||||
|  |  | ||||||
|     Turns off all dereferencing in Documents for the duration of the context |     Turns off all dereferencing in Documents for the duration of the context | ||||||
| @@ -111,14 +118,15 @@ class no_dereference(object): | |||||||
|         """ |         """ | ||||||
|         self.cls = cls |         self.cls = cls | ||||||
|  |  | ||||||
|         ReferenceField = _import_class('ReferenceField') |         ReferenceField = _import_class("ReferenceField") | ||||||
|         GenericReferenceField = _import_class('GenericReferenceField') |         GenericReferenceField = _import_class("GenericReferenceField") | ||||||
|         ComplexBaseField = _import_class('ComplexBaseField') |         ComplexBaseField = _import_class("ComplexBaseField") | ||||||
|  |  | ||||||
|         self.deref_fields = [k for k, v in iteritems(self.cls._fields) |         self.deref_fields = [ | ||||||
|                              if isinstance(v, (ReferenceField, |             k | ||||||
|                                                GenericReferenceField, |             for k, v in self.cls._fields.items() | ||||||
|                                                ComplexBaseField))] |             if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField)) | ||||||
|  |         ] | ||||||
|  |  | ||||||
|     def __enter__(self): |     def __enter__(self): | ||||||
|         """Change the objects default and _auto_dereference values.""" |         """Change the objects default and _auto_dereference values.""" | ||||||
| @@ -133,7 +141,7 @@ class no_dereference(object): | |||||||
|         return self.cls |         return self.cls | ||||||
|  |  | ||||||
|  |  | ||||||
| class no_sub_classes(object): | class no_sub_classes: | ||||||
|     """no_sub_classes context manager. |     """no_sub_classes context manager. | ||||||
|  |  | ||||||
|     Only returns instances of this class and no sub (inherited) classes:: |     Only returns instances of this class and no sub (inherited) classes:: | ||||||
| @@ -161,10 +169,10 @@ class no_sub_classes(object): | |||||||
|         self.cls._subclasses = self.cls_initial_subclasses |         self.cls._subclasses = self.cls_initial_subclasses | ||||||
|  |  | ||||||
|  |  | ||||||
| class query_counter(object): | class query_counter: | ||||||
|     """Query_counter context manager to get the number of queries. |     """Query_counter context manager to get the number of queries. | ||||||
|     This works by updating the `profiling_level` of the database so that all queries get logged, |     This works by updating the `profiling_level` of the database so that all queries get logged, | ||||||
|     resetting the db.system.profile collection at the beginnig of the context and counting the new entries. |     resetting the db.system.profile collection at the beginning of the context and counting the new entries. | ||||||
|  |  | ||||||
|     This was designed for debugging purpose. In fact it is a global counter so queries issued by other threads/processes |     This was designed for debugging purpose. In fact it is a global counter so queries issued by other threads/processes | ||||||
|     can interfere with it |     can interfere with it | ||||||
| @@ -175,20 +183,17 @@ class query_counter(object): | |||||||
|     - Some queries are ignored by default by the counter (killcursors, db.system.indexes) |     - Some queries are ignored by default by the counter (killcursors, db.system.indexes) | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def __init__(self): |     def __init__(self, alias=DEFAULT_CONNECTION_NAME): | ||||||
|         """Construct the query_counter |         """Construct the query_counter | ||||||
|         """ |         """ | ||||||
|         self.db = get_db() |         self.db = get_db(alias=alias) | ||||||
|         self.initial_profiling_level = None |         self.initial_profiling_level = None | ||||||
|         self._ctx_query_counter = 0             # number of queries issued by the context |         self._ctx_query_counter = 0  # number of queries issued by the context | ||||||
|  |  | ||||||
|         self._ignored_query = { |         self._ignored_query = { | ||||||
|             'ns': |             "ns": {"$ne": "%s.system.indexes" % self.db.name}, | ||||||
|                 {'$ne': '%s.system.indexes' % self.db.name}, |             "op": {"$ne": "killcursors"},  # MONGODB < 3.2 | ||||||
|             'op':                       # MONGODB < 3.2 |             "command.killCursors": {"$exists": False},  # MONGODB >= 3.2 | ||||||
|                 {'$ne': 'killcursors'}, |  | ||||||
|             'command.killCursors':      # MONGODB >= 3.2 |  | ||||||
|                 {'$exists': False} |  | ||||||
|         } |         } | ||||||
|  |  | ||||||
|     def _turn_on_profiling(self): |     def _turn_on_profiling(self): | ||||||
| @@ -231,15 +236,20 @@ class query_counter(object): | |||||||
|  |  | ||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
|         """repr query_counter as the number of queries.""" |         """repr query_counter as the number of queries.""" | ||||||
|         return u"%s" % self._get_count() |         return "%s" % self._get_count() | ||||||
|  |  | ||||||
|     def _get_count(self): |     def _get_count(self): | ||||||
|         """Get the number of queries by counting the current number of entries in db.system.profile |         """Get the number of queries by counting the current number of entries in db.system.profile | ||||||
|         and substracting the queries issued by this context. In fact everytime this is called, 1 query is |         and substracting the queries issued by this context. In fact everytime this is called, 1 query is | ||||||
|         issued so we need to balance that |         issued so we need to balance that | ||||||
|         """ |         """ | ||||||
|         count = count_documents(self.db.system.profile, self._ignored_query) - self._ctx_query_counter |         count = ( | ||||||
|         self._ctx_query_counter += 1    # Account for the query we just issued to gather the information |             count_documents(self.db.system.profile, self._ignored_query) | ||||||
|  |             - self._ctx_query_counter | ||||||
|  |         ) | ||||||
|  |         self._ctx_query_counter += ( | ||||||
|  |             1  # Account for the query we just issued to gather the information | ||||||
|  |         ) | ||||||
|         return count |         return count | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -248,3 +258,21 @@ def set_write_concern(collection, write_concerns): | |||||||
|     combined_concerns = dict(collection.write_concern.document.items()) |     combined_concerns = dict(collection.write_concern.document.items()) | ||||||
|     combined_concerns.update(write_concerns) |     combined_concerns.update(write_concerns) | ||||||
|     yield collection.with_options(write_concern=WriteConcern(**combined_concerns)) |     yield collection.with_options(write_concern=WriteConcern(**combined_concerns)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | @contextmanager | ||||||
|  | def set_read_write_concern(collection, write_concerns, read_concerns): | ||||||
|  |     combined_write_concerns = dict(collection.write_concern.document.items()) | ||||||
|  |  | ||||||
|  |     if write_concerns is not None: | ||||||
|  |         combined_write_concerns.update(write_concerns) | ||||||
|  |  | ||||||
|  |     combined_read_concerns = dict(collection.read_concern.document.items()) | ||||||
|  |  | ||||||
|  |     if read_concerns is not None: | ||||||
|  |         combined_read_concerns.update(read_concerns) | ||||||
|  |  | ||||||
|  |     yield collection.with_options( | ||||||
|  |         write_concern=WriteConcern(**combined_write_concerns), | ||||||
|  |         read_concern=ReadConcern(**combined_read_concerns), | ||||||
|  |     ) | ||||||
|   | |||||||
| @@ -1,9 +1,12 @@ | |||||||
| from bson import DBRef, SON | from bson import DBRef, SON | ||||||
| import six |  | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList, | from mongoengine.base import ( | ||||||
|                               TopLevelDocumentMetaclass, get_document) |     BaseDict, | ||||||
|  |     BaseList, | ||||||
|  |     EmbeddedDocumentList, | ||||||
|  |     TopLevelDocumentMetaclass, | ||||||
|  |     get_document, | ||||||
|  | ) | ||||||
| from mongoengine.base.datastructures import LazyReference | from mongoengine.base.datastructures import LazyReference | ||||||
| from mongoengine.connection import get_db | from mongoengine.connection import get_db | ||||||
| from mongoengine.document import Document, EmbeddedDocument | from mongoengine.document import Document, EmbeddedDocument | ||||||
| @@ -11,7 +14,7 @@ from mongoengine.fields import DictField, ListField, MapField, ReferenceField | |||||||
| from mongoengine.queryset import QuerySet | from mongoengine.queryset import QuerySet | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeReference(object): | class DeReference: | ||||||
|     def __call__(self, items, max_depth=1, instance=None, name=None): |     def __call__(self, items, max_depth=1, instance=None, name=None): | ||||||
|         """ |         """ | ||||||
|         Cheaply dereferences the items to a set depth. |         Cheaply dereferences the items to a set depth. | ||||||
| @@ -25,7 +28,7 @@ class DeReference(object): | |||||||
|             :class:`~mongoengine.base.ComplexBaseField` |             :class:`~mongoengine.base.ComplexBaseField` | ||||||
|         :param get: A boolean determining if being called by __get__ |         :param get: A boolean determining if being called by __get__ | ||||||
|         """ |         """ | ||||||
|         if items is None or isinstance(items, six.string_types): |         if items is None or isinstance(items, str): | ||||||
|             return items |             return items | ||||||
|  |  | ||||||
|         # cheapest way to convert a queryset to a list |         # cheapest way to convert a queryset to a list | ||||||
| @@ -36,21 +39,23 @@ class DeReference(object): | |||||||
|         self.max_depth = max_depth |         self.max_depth = max_depth | ||||||
|         doc_type = None |         doc_type = None | ||||||
|  |  | ||||||
|         if instance and isinstance(instance, (Document, EmbeddedDocument, |         if instance and isinstance( | ||||||
|                                               TopLevelDocumentMetaclass)): |             instance, (Document, EmbeddedDocument, TopLevelDocumentMetaclass) | ||||||
|  |         ): | ||||||
|             doc_type = instance._fields.get(name) |             doc_type = instance._fields.get(name) | ||||||
|             while hasattr(doc_type, 'field'): |             while hasattr(doc_type, "field"): | ||||||
|                 doc_type = doc_type.field |                 doc_type = doc_type.field | ||||||
|  |  | ||||||
|             if isinstance(doc_type, ReferenceField): |             if isinstance(doc_type, ReferenceField): | ||||||
|                 field = doc_type |                 field = doc_type | ||||||
|                 doc_type = doc_type.document_type |                 doc_type = doc_type.document_type | ||||||
|                 is_list = not hasattr(items, 'items') |                 is_list = not hasattr(items, "items") | ||||||
|  |  | ||||||
|                 if is_list and all([i.__class__ == doc_type for i in items]): |                 if is_list and all([i.__class__ == doc_type for i in items]): | ||||||
|                     return items |                     return items | ||||||
|                 elif not is_list and all( |                 elif not is_list and all( | ||||||
|                         [i.__class__ == doc_type for i in items.values()]): |                     [i.__class__ == doc_type for i in items.values()] | ||||||
|  |                 ): | ||||||
|                     return items |                     return items | ||||||
|                 elif not field.dbref: |                 elif not field.dbref: | ||||||
|                     # We must turn the ObjectIds into DBRefs |                     # We must turn the ObjectIds into DBRefs | ||||||
| @@ -72,7 +77,7 @@ class DeReference(object): | |||||||
|  |  | ||||||
|                     def _get_items_from_dict(items): |                     def _get_items_from_dict(items): | ||||||
|                         new_items = {} |                         new_items = {} | ||||||
|                         for k, v in iteritems(items): |                         for k, v in items.items(): | ||||||
|                             value = v |                             value = v | ||||||
|                             if isinstance(v, list): |                             if isinstance(v, list): | ||||||
|                                 value = _get_items_from_list(v) |                                 value = _get_items_from_list(v) | ||||||
| @@ -83,7 +88,7 @@ class DeReference(object): | |||||||
|                             new_items[k] = value |                             new_items[k] = value | ||||||
|                         return new_items |                         return new_items | ||||||
|  |  | ||||||
|                     if not hasattr(items, 'items'): |                     if not hasattr(items, "items"): | ||||||
|                         items = _get_items_from_list(items) |                         items = _get_items_from_list(items) | ||||||
|                     else: |                     else: | ||||||
|                         items = _get_items_from_dict(items) |                         items = _get_items_from_dict(items) | ||||||
| @@ -113,20 +118,26 @@ class DeReference(object): | |||||||
|         depth += 1 |         depth += 1 | ||||||
|         for item in iterator: |         for item in iterator: | ||||||
|             if isinstance(item, (Document, EmbeddedDocument)): |             if isinstance(item, (Document, EmbeddedDocument)): | ||||||
|                 for field_name, field in iteritems(item._fields): |                 for field_name, field in item._fields.items(): | ||||||
|                     v = item._data.get(field_name, None) |                     v = item._data.get(field_name, None) | ||||||
|                     if isinstance(v, LazyReference): |                     if isinstance(v, LazyReference): | ||||||
|                         # LazyReference inherits DBRef but should not be dereferenced here ! |                         # LazyReference inherits DBRef but should not be dereferenced here ! | ||||||
|                         continue |                         continue | ||||||
|                     elif isinstance(v, DBRef): |                     elif isinstance(v, DBRef): | ||||||
|                         reference_map.setdefault(field.document_type, set()).add(v.id) |                         reference_map.setdefault(field.document_type, set()).add(v.id) | ||||||
|                     elif isinstance(v, (dict, SON)) and '_ref' in v: |                     elif isinstance(v, (dict, SON)) and "_ref" in v: | ||||||
|                         reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id) |                         reference_map.setdefault(get_document(v["_cls"]), set()).add( | ||||||
|  |                             v["_ref"].id | ||||||
|  |                         ) | ||||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: |                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None) |                         field_cls = getattr( | ||||||
|  |                             getattr(field, "field", None), "document_type", None | ||||||
|  |                         ) | ||||||
|                         references = self._find_references(v, depth) |                         references = self._find_references(v, depth) | ||||||
|                         for key, refs in iteritems(references): |                         for key, refs in references.items(): | ||||||
|                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): |                             if isinstance( | ||||||
|  |                                 field_cls, (Document, TopLevelDocumentMetaclass) | ||||||
|  |                             ): | ||||||
|                                 key = field_cls |                                 key = field_cls | ||||||
|                             reference_map.setdefault(key, set()).update(refs) |                             reference_map.setdefault(key, set()).update(refs) | ||||||
|             elif isinstance(item, LazyReference): |             elif isinstance(item, LazyReference): | ||||||
| @@ -134,11 +145,13 @@ class DeReference(object): | |||||||
|                 continue |                 continue | ||||||
|             elif isinstance(item, DBRef): |             elif isinstance(item, DBRef): | ||||||
|                 reference_map.setdefault(item.collection, set()).add(item.id) |                 reference_map.setdefault(item.collection, set()).add(item.id) | ||||||
|             elif isinstance(item, (dict, SON)) and '_ref' in item: |             elif isinstance(item, (dict, SON)) and "_ref" in item: | ||||||
|                 reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id) |                 reference_map.setdefault(get_document(item["_cls"]), set()).add( | ||||||
|  |                     item["_ref"].id | ||||||
|  |                 ) | ||||||
|             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: |             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: | ||||||
|                 references = self._find_references(item, depth - 1) |                 references = self._find_references(item, depth - 1) | ||||||
|                 for key, refs in iteritems(references): |                 for key, refs in references.items(): | ||||||
|                     reference_map.setdefault(key, set()).update(refs) |                     reference_map.setdefault(key, set()).update(refs) | ||||||
|  |  | ||||||
|         return reference_map |         return reference_map | ||||||
| @@ -147,40 +160,44 @@ class DeReference(object): | |||||||
|         """Fetch all references and convert to their document objects |         """Fetch all references and convert to their document objects | ||||||
|         """ |         """ | ||||||
|         object_map = {} |         object_map = {} | ||||||
|         for collection, dbrefs in iteritems(self.reference_map): |         for collection, dbrefs in self.reference_map.items(): | ||||||
|  |  | ||||||
|             # we use getattr instead of hasattr because hasattr swallows any exception under python2 |             # we use getattr instead of hasattr because hasattr swallows any exception under python2 | ||||||
|             # so it could hide nasty things without raising exceptions (cfr bug #1688)) |             # so it could hide nasty things without raising exceptions (cfr bug #1688)) | ||||||
|             ref_document_cls_exists = (getattr(collection, 'objects', None) is not None) |             ref_document_cls_exists = getattr(collection, "objects", None) is not None | ||||||
|  |  | ||||||
|             if ref_document_cls_exists: |             if ref_document_cls_exists: | ||||||
|                 col_name = collection._get_collection_name() |                 col_name = collection._get_collection_name() | ||||||
|                 refs = [dbref for dbref in dbrefs |                 refs = [ | ||||||
|                         if (col_name, dbref) not in object_map] |                     dbref for dbref in dbrefs if (col_name, dbref) not in object_map | ||||||
|  |                 ] | ||||||
|                 references = collection.objects.in_bulk(refs) |                 references = collection.objects.in_bulk(refs) | ||||||
|                 for key, doc in iteritems(references): |                 for key, doc in references.items(): | ||||||
|                     object_map[(col_name, key)] = doc |                     object_map[(col_name, key)] = doc | ||||||
|             else:  # Generic reference: use the refs data to convert to document |             else:  # Generic reference: use the refs data to convert to document | ||||||
|                 if isinstance(doc_type, (ListField, DictField, MapField)): |                 if isinstance(doc_type, (ListField, DictField, MapField)): | ||||||
|                     continue |                     continue | ||||||
|  |  | ||||||
|                 refs = [dbref for dbref in dbrefs |                 refs = [ | ||||||
|                         if (collection, dbref) not in object_map] |                     dbref for dbref in dbrefs if (collection, dbref) not in object_map | ||||||
|  |                 ] | ||||||
|  |  | ||||||
|                 if doc_type: |                 if doc_type: | ||||||
|                     references = doc_type._get_db()[collection].find({'_id': {'$in': refs}}) |                     references = doc_type._get_db()[collection].find( | ||||||
|  |                         {"_id": {"$in": refs}} | ||||||
|  |                     ) | ||||||
|                     for ref in references: |                     for ref in references: | ||||||
|                         doc = doc_type._from_son(ref) |                         doc = doc_type._from_son(ref) | ||||||
|                         object_map[(collection, doc.id)] = doc |                         object_map[(collection, doc.id)] = doc | ||||||
|                 else: |                 else: | ||||||
|                     references = get_db()[collection].find({'_id': {'$in': refs}}) |                     references = get_db()[collection].find({"_id": {"$in": refs}}) | ||||||
|                     for ref in references: |                     for ref in references: | ||||||
|                         if '_cls' in ref: |                         if "_cls" in ref: | ||||||
|                             doc = get_document(ref['_cls'])._from_son(ref) |                             doc = get_document(ref["_cls"])._from_son(ref) | ||||||
|                         elif doc_type is None: |                         elif doc_type is None: | ||||||
|                             doc = get_document( |                             doc = get_document( | ||||||
|                                 ''.join(x.capitalize() |                                 "".join(x.capitalize() for x in collection.split("_")) | ||||||
|                                         for x in collection.split('_')))._from_son(ref) |                             )._from_son(ref) | ||||||
|                         else: |                         else: | ||||||
|                             doc = doc_type._from_son(ref) |                             doc = doc_type._from_son(ref) | ||||||
|                         object_map[(collection, doc.id)] = doc |                         object_map[(collection, doc.id)] = doc | ||||||
| @@ -208,19 +225,20 @@ class DeReference(object): | |||||||
|                     return BaseList(items, instance, name) |                     return BaseList(items, instance, name) | ||||||
|  |  | ||||||
|         if isinstance(items, (dict, SON)): |         if isinstance(items, (dict, SON)): | ||||||
|             if '_ref' in items: |             if "_ref" in items: | ||||||
|                 return self.object_map.get( |                 return self.object_map.get( | ||||||
|                     (items['_ref'].collection, items['_ref'].id), items) |                     (items["_ref"].collection, items["_ref"].id), items | ||||||
|             elif '_cls' in items: |                 ) | ||||||
|                 doc = get_document(items['_cls'])._from_son(items) |             elif "_cls" in items: | ||||||
|                 _cls = doc._data.pop('_cls', None) |                 doc = get_document(items["_cls"])._from_son(items) | ||||||
|                 del items['_cls'] |                 _cls = doc._data.pop("_cls", None) | ||||||
|  |                 del items["_cls"] | ||||||
|                 doc._data = self._attach_objects(doc._data, depth, doc, None) |                 doc._data = self._attach_objects(doc._data, depth, doc, None) | ||||||
|                 if _cls is not None: |                 if _cls is not None: | ||||||
|                     doc._data['_cls'] = _cls |                     doc._data["_cls"] = _cls | ||||||
|                 return doc |                 return doc | ||||||
|  |  | ||||||
|         if not hasattr(items, 'items'): |         if not hasattr(items, "items"): | ||||||
|             is_list = True |             is_list = True | ||||||
|             list_type = BaseList |             list_type = BaseList | ||||||
|             if isinstance(items, EmbeddedDocumentList): |             if isinstance(items, EmbeddedDocumentList): | ||||||
| @@ -230,7 +248,7 @@ class DeReference(object): | |||||||
|             data = [] |             data = [] | ||||||
|         else: |         else: | ||||||
|             is_list = False |             is_list = False | ||||||
|             iterator = iteritems(items) |             iterator = items.items() | ||||||
|             data = {} |             data = {} | ||||||
|  |  | ||||||
|         depth += 1 |         depth += 1 | ||||||
| @@ -247,17 +265,23 @@ class DeReference(object): | |||||||
|                     v = data[k]._data.get(field_name, None) |                     v = data[k]._data.get(field_name, None) | ||||||
|                     if isinstance(v, DBRef): |                     if isinstance(v, DBRef): | ||||||
|                         data[k]._data[field_name] = self.object_map.get( |                         data[k]._data[field_name] = self.object_map.get( | ||||||
|                             (v.collection, v.id), v) |                             (v.collection, v.id), v | ||||||
|                     elif isinstance(v, (dict, SON)) and '_ref' in v: |                         ) | ||||||
|  |                     elif isinstance(v, (dict, SON)) and "_ref" in v: | ||||||
|                         data[k]._data[field_name] = self.object_map.get( |                         data[k]._data[field_name] = self.object_map.get( | ||||||
|                             (v['_ref'].collection, v['_ref'].id), v) |                             (v["_ref"].collection, v["_ref"].id), v | ||||||
|  |                         ) | ||||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: |                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|                         item_name = six.text_type('{0}.{1}.{2}').format(name, k, field_name) |                         item_name = "{}.{}.{}".format(name, k, field_name) | ||||||
|                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name) |                         data[k]._data[field_name] = self._attach_objects( | ||||||
|  |                             v, depth, instance=instance, name=item_name | ||||||
|  |                         ) | ||||||
|             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: |             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|                 item_name = '%s.%s' % (name, k) if name else name |                 item_name = "{}.{}".format(name, k) if name else name | ||||||
|                 data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name) |                 data[k] = self._attach_objects( | ||||||
|             elif isinstance(v, DBRef) and hasattr(v, 'id'): |                     v, depth - 1, instance=instance, name=item_name | ||||||
|  |                 ) | ||||||
|  |             elif isinstance(v, DBRef) and hasattr(v, "id"): | ||||||
|                 data[k] = self.object_map.get((v.collection, v.id), v) |                 data[k] = self.object_map.get((v.collection, v.id), v) | ||||||
|  |  | ||||||
|         if instance and name: |         if instance and name: | ||||||
|   | |||||||
| @@ -4,46 +4,57 @@ import warnings | |||||||
| from bson.dbref import DBRef | from bson.dbref import DBRef | ||||||
| import pymongo | import pymongo | ||||||
| from pymongo.read_preferences import ReadPreference | from pymongo.read_preferences import ReadPreference | ||||||
| import six |  | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine import signals | from mongoengine import signals | ||||||
| from mongoengine.base import (BaseDict, BaseDocument, BaseList, | from mongoengine.base import ( | ||||||
|                               DocumentMetaclass, EmbeddedDocumentList, |     BaseDict, | ||||||
|                               TopLevelDocumentMetaclass, get_document) |     BaseDocument, | ||||||
|  |     BaseList, | ||||||
|  |     DocumentMetaclass, | ||||||
|  |     EmbeddedDocumentList, | ||||||
|  |     TopLevelDocumentMetaclass, | ||||||
|  |     get_document, | ||||||
|  | ) | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||||
| from mongoengine.context_managers import (set_write_concern, | from mongoengine.context_managers import set_write_concern, switch_collection, switch_db | ||||||
|                                           switch_collection, | from mongoengine.errors import ( | ||||||
|                                           switch_db) |     InvalidDocumentError, | ||||||
| from mongoengine.errors import (InvalidDocumentError, InvalidQueryError, |     InvalidQueryError, | ||||||
|                                 SaveConditionError) |     SaveConditionError, | ||||||
|  | ) | ||||||
| from mongoengine.pymongo_support import list_collection_names | from mongoengine.pymongo_support import list_collection_names | ||||||
| from mongoengine.queryset import (NotUniqueError, OperationError, | from mongoengine.queryset import NotUniqueError, OperationError, QuerySet, transform | ||||||
|                                   QuerySet, transform) |  | ||||||
|  |  | ||||||
| __all__ = ('Document', 'EmbeddedDocument', 'DynamicDocument', | __all__ = ( | ||||||
|            'DynamicEmbeddedDocument', 'OperationError', |     "Document", | ||||||
|            'InvalidCollectionError', 'NotUniqueError', 'MapReduceDocument') |     "EmbeddedDocument", | ||||||
|  |     "DynamicDocument", | ||||||
|  |     "DynamicEmbeddedDocument", | ||||||
|  |     "OperationError", | ||||||
|  |     "InvalidCollectionError", | ||||||
|  |     "NotUniqueError", | ||||||
|  |     "MapReduceDocument", | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
| def includes_cls(fields): | def includes_cls(fields): | ||||||
|     """Helper function used for ensuring and comparing indexes.""" |     """Helper function used for ensuring and comparing indexes.""" | ||||||
|     first_field = None |     first_field = None | ||||||
|     if len(fields): |     if len(fields): | ||||||
|         if isinstance(fields[0], six.string_types): |         if isinstance(fields[0], str): | ||||||
|             first_field = fields[0] |             first_field = fields[0] | ||||||
|         elif isinstance(fields[0], (list, tuple)) and len(fields[0]): |         elif isinstance(fields[0], (list, tuple)) and len(fields[0]): | ||||||
|             first_field = fields[0][0] |             first_field = fields[0][0] | ||||||
|     return first_field == '_cls' |     return first_field == "_cls" | ||||||
|  |  | ||||||
|  |  | ||||||
| class InvalidCollectionError(Exception): | class InvalidCollectionError(Exception): | ||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass): | ||||||
|     """A :class:`~mongoengine.Document` that isn't stored in its own |     r"""A :class:`~mongoengine.Document` that isn't stored in its own | ||||||
|     collection.  :class:`~mongoengine.EmbeddedDocument`\ s should be used as |     collection.  :class:`~mongoengine.EmbeddedDocument`\ s should be used as | ||||||
|     fields on :class:`~mongoengine.Document`\ s through the |     fields on :class:`~mongoengine.Document`\ s through the | ||||||
|     :class:`~mongoengine.EmbeddedDocumentField` field type. |     :class:`~mongoengine.EmbeddedDocumentField` field type. | ||||||
| @@ -56,9 +67,8 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | |||||||
|     :attr:`meta` dictionary. |     :attr:`meta` dictionary. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     __slots__ = ('_instance', ) |     __slots__ = ("_instance",) | ||||||
|  |  | ||||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 |  | ||||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|     my_metaclass = DocumentMetaclass |     my_metaclass = DocumentMetaclass | ||||||
|  |  | ||||||
| @@ -69,7 +79,7 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | |||||||
|     __hash__ = None |     __hash__ = None | ||||||
|  |  | ||||||
|     def __init__(self, *args, **kwargs): |     def __init__(self, *args, **kwargs): | ||||||
|         super(EmbeddedDocument, self).__init__(*args, **kwargs) |         super().__init__(*args, **kwargs) | ||||||
|         self._instance = None |         self._instance = None | ||||||
|         self._changed_fields = [] |         self._changed_fields = [] | ||||||
|  |  | ||||||
| @@ -82,16 +92,16 @@ class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | |||||||
|         return not self.__eq__(other) |         return not self.__eq__(other) | ||||||
|  |  | ||||||
|     def to_mongo(self, *args, **kwargs): |     def to_mongo(self, *args, **kwargs): | ||||||
|         data = super(EmbeddedDocument, self).to_mongo(*args, **kwargs) |         data = super().to_mongo(*args, **kwargs) | ||||||
|  |  | ||||||
|         # remove _id from the SON if it's in it and it's None |         # remove _id from the SON if it's in it and it's None | ||||||
|         if '_id' in data and data['_id'] is None: |         if "_id" in data and data["_id"] is None: | ||||||
|             del data['_id'] |             del data["_id"] | ||||||
|  |  | ||||||
|         return data |         return data | ||||||
|  |  | ||||||
|  |  | ||||||
| class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass): | ||||||
|     """The base class used for defining the structure and properties of |     """The base class used for defining the structure and properties of | ||||||
|     collections of documents stored in MongoDB. Inherit from this class, and |     collections of documents stored in MongoDB. Inherit from this class, and | ||||||
|     add fields as class attributes to define a document's structure. |     add fields as class attributes to define a document's structure. | ||||||
| @@ -143,23 +153,22 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|     in the :attr:`meta` dictionary. |     in the :attr:`meta` dictionary. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 |  | ||||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|     my_metaclass = TopLevelDocumentMetaclass |     my_metaclass = TopLevelDocumentMetaclass | ||||||
|  |  | ||||||
|     __slots__ = ('__objects',) |     __slots__ = ("__objects",) | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def pk(self): |     def pk(self): | ||||||
|         """Get the primary key.""" |         """Get the primary key.""" | ||||||
|         if 'id_field' not in self._meta: |         if "id_field" not in self._meta: | ||||||
|             return None |             return None | ||||||
|         return getattr(self, self._meta['id_field']) |         return getattr(self, self._meta["id_field"]) | ||||||
|  |  | ||||||
|     @pk.setter |     @pk.setter | ||||||
|     def pk(self, value): |     def pk(self, value): | ||||||
|         """Set the primary key.""" |         """Set the primary key.""" | ||||||
|         return setattr(self, self._meta['id_field'], value) |         return setattr(self, self._meta["id_field"], value) | ||||||
|  |  | ||||||
|     def __hash__(self): |     def __hash__(self): | ||||||
|         """Return the hash based on the PK of this document. If it's new |         """Return the hash based on the PK of this document. If it's new | ||||||
| @@ -173,7 +182,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|     @classmethod |     @classmethod | ||||||
|     def _get_db(cls): |     def _get_db(cls): | ||||||
|         """Some Model using other db_alias""" |         """Some Model using other db_alias""" | ||||||
|         return get_db(cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)) |         return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME)) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _disconnect(cls): |     def _disconnect(cls): | ||||||
| @@ -182,12 +191,17 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _get_collection(cls): |     def _get_collection(cls): | ||||||
|         """Return the corresponding PyMongo collection of this document. |         """Return the PyMongo collection corresponding to this document. | ||||||
|         Upon the first call, it will ensure that indexes gets created. The returned collection then gets cached |  | ||||||
|  |         Upon first call, this method: | ||||||
|  |         1. Initializes a :class:`~pymongo.collection.Collection` corresponding | ||||||
|  |            to this document. | ||||||
|  |         2. Creates indexes defined in this document's :attr:`meta` dictionary. | ||||||
|  |            This happens only if `auto_create_index` is True. | ||||||
|         """ |         """ | ||||||
|         if not hasattr(cls, '_collection') or cls._collection is None: |         if not hasattr(cls, "_collection") or cls._collection is None: | ||||||
|             # Get the collection, either capped or regular. |             # Get the collection, either capped or regular. | ||||||
|             if cls._meta.get('max_size') or cls._meta.get('max_documents'): |             if cls._meta.get("max_size") or cls._meta.get("max_documents"): | ||||||
|                 cls._collection = cls._get_capped_collection() |                 cls._collection = cls._get_capped_collection() | ||||||
|             else: |             else: | ||||||
|                 db = cls._get_db() |                 db = cls._get_db() | ||||||
| @@ -198,8 +212,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|             # set to False. |             # set to False. | ||||||
|             # Also there is no need to ensure indexes on slave. |             # Also there is no need to ensure indexes on slave. | ||||||
|             db = cls._get_db() |             db = cls._get_db() | ||||||
|             if cls._meta.get('auto_create_index', True) and\ |             if cls._meta.get("auto_create_index", True) and db.client.is_primary: | ||||||
|                     db.client.is_primary: |  | ||||||
|                 cls.ensure_indexes() |                 cls.ensure_indexes() | ||||||
|  |  | ||||||
|         return cls._collection |         return cls._collection | ||||||
| @@ -211,8 +224,8 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         collection_name = cls._get_collection_name() |         collection_name = cls._get_collection_name() | ||||||
|  |  | ||||||
|         # Get max document limit and max byte size from meta. |         # Get max document limit and max byte size from meta. | ||||||
|         max_size = cls._meta.get('max_size') or 10 * 2 ** 20  # 10MB default |         max_size = cls._meta.get("max_size") or 10 * 2 ** 20  # 10MB default | ||||||
|         max_documents = cls._meta.get('max_documents') |         max_documents = cls._meta.get("max_documents") | ||||||
|  |  | ||||||
|         # MongoDB will automatically raise the size to make it a multiple of |         # MongoDB will automatically raise the size to make it a multiple of | ||||||
|         # 256 bytes. We raise it here ourselves to be able to reliably compare |         # 256 bytes. We raise it here ourselves to be able to reliably compare | ||||||
| @@ -222,37 +235,36 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|  |  | ||||||
|         # If the collection already exists and has different options |         # If the collection already exists and has different options | ||||||
|         # (i.e. isn't capped or has different max/size), raise an error. |         # (i.e. isn't capped or has different max/size), raise an error. | ||||||
|         if collection_name in list_collection_names(db, include_system_collections=True): |         if collection_name in list_collection_names( | ||||||
|  |             db, include_system_collections=True | ||||||
|  |         ): | ||||||
|             collection = db[collection_name] |             collection = db[collection_name] | ||||||
|             options = collection.options() |             options = collection.options() | ||||||
|             if ( |             if options.get("max") != max_documents or options.get("size") != max_size: | ||||||
|                 options.get('max') != max_documents or |  | ||||||
|                 options.get('size') != max_size |  | ||||||
|             ): |  | ||||||
|                 raise InvalidCollectionError( |                 raise InvalidCollectionError( | ||||||
|                     'Cannot create collection "{}" as a capped ' |                     'Cannot create collection "{}" as a capped ' | ||||||
|                     'collection as it already exists'.format(cls._collection) |                     "collection as it already exists".format(cls._collection) | ||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|             return collection |             return collection | ||||||
|  |  | ||||||
|         # Create a new capped collection. |         # Create a new capped collection. | ||||||
|         opts = {'capped': True, 'size': max_size} |         opts = {"capped": True, "size": max_size} | ||||||
|         if max_documents: |         if max_documents: | ||||||
|             opts['max'] = max_documents |             opts["max"] = max_documents | ||||||
|  |  | ||||||
|         return db.create_collection(collection_name, **opts) |         return db.create_collection(collection_name, **opts) | ||||||
|  |  | ||||||
|     def to_mongo(self, *args, **kwargs): |     def to_mongo(self, *args, **kwargs): | ||||||
|         data = super(Document, self).to_mongo(*args, **kwargs) |         data = super().to_mongo(*args, **kwargs) | ||||||
|  |  | ||||||
|         # If '_id' is None, try and set it from self._data. If that |         # If '_id' is None, try and set it from self._data. If that | ||||||
|         # doesn't exist either, remove '_id' from the SON completely. |         # doesn't exist either, remove '_id' from the SON completely. | ||||||
|         if data['_id'] is None: |         if data["_id"] is None: | ||||||
|             if self._data.get('id') is None: |             if self._data.get("id") is None: | ||||||
|                 del data['_id'] |                 del data["_id"] | ||||||
|             else: |             else: | ||||||
|                 data['_id'] = self._data['id'] |                 data["_id"] = self._data["id"] | ||||||
|  |  | ||||||
|         return data |         return data | ||||||
|  |  | ||||||
| @@ -274,15 +286,17 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|             query = {} |             query = {} | ||||||
|  |  | ||||||
|         if self.pk is None: |         if self.pk is None: | ||||||
|             raise InvalidDocumentError('The document does not have a primary key.') |             raise InvalidDocumentError("The document does not have a primary key.") | ||||||
|  |  | ||||||
|         id_field = self._meta['id_field'] |         id_field = self._meta["id_field"] | ||||||
|         query = query.copy() if isinstance(query, dict) else query.to_query(self) |         query = query.copy() if isinstance(query, dict) else query.to_query(self) | ||||||
|  |  | ||||||
|         if id_field not in query: |         if id_field not in query: | ||||||
|             query[id_field] = self.pk |             query[id_field] = self.pk | ||||||
|         elif query[id_field] != self.pk: |         elif query[id_field] != self.pk: | ||||||
|             raise InvalidQueryError('Invalid document modify query: it must modify only this document.') |             raise InvalidQueryError( | ||||||
|  |                 "Invalid document modify query: it must modify only this document." | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         # Need to add shard key to query, or you get an error |         # Need to add shard key to query, or you get an error | ||||||
|         query.update(self._object_key) |         query.update(self._object_key) | ||||||
| @@ -299,12 +313,22 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|  |  | ||||||
|         return True |         return True | ||||||
|  |  | ||||||
|     def save(self, force_insert=False, validate=True, clean=True, |     def save( | ||||||
|              write_concern=None, cascade=None, cascade_kwargs=None, |         self, | ||||||
|              _refs=None, save_condition=None, signal_kwargs=None, **kwargs): |         force_insert=False, | ||||||
|  |         validate=True, | ||||||
|  |         clean=True, | ||||||
|  |         write_concern=None, | ||||||
|  |         cascade=None, | ||||||
|  |         cascade_kwargs=None, | ||||||
|  |         _refs=None, | ||||||
|  |         save_condition=None, | ||||||
|  |         signal_kwargs=None, | ||||||
|  |         **kwargs | ||||||
|  |     ): | ||||||
|         """Save the :class:`~mongoengine.Document` to the database. If the |         """Save the :class:`~mongoengine.Document` to the database. If the | ||||||
|         document already exists, it will be updated, otherwise it will be |         document already exists, it will be updated, otherwise it will be | ||||||
|         created. |         created. Returns the saved object instance. | ||||||
|  |  | ||||||
|         :param force_insert: only try to create a new document, don't allow |         :param force_insert: only try to create a new document, don't allow | ||||||
|             updates of existing documents. |             updates of existing documents. | ||||||
| @@ -355,8 +379,8 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         """ |         """ | ||||||
|         signal_kwargs = signal_kwargs or {} |         signal_kwargs = signal_kwargs or {} | ||||||
|  |  | ||||||
|         if self._meta.get('abstract'): |         if self._meta.get("abstract"): | ||||||
|             raise InvalidDocumentError('Cannot save an abstract document.') |             raise InvalidDocumentError("Cannot save an abstract document.") | ||||||
|  |  | ||||||
|         signals.pre_save.send(self.__class__, document=self, **signal_kwargs) |         signals.pre_save.send(self.__class__, document=self, **signal_kwargs) | ||||||
|  |  | ||||||
| @@ -366,15 +390,16 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         if write_concern is None: |         if write_concern is None: | ||||||
|             write_concern = {} |             write_concern = {} | ||||||
|  |  | ||||||
|         doc_id = self.to_mongo(fields=['id']) |         doc_id = self.to_mongo(fields=[self._meta["id_field"]]) | ||||||
|         created = ('_id' not in doc_id or self._created or force_insert) |         created = "_id" not in doc_id or self._created or force_insert | ||||||
|  |  | ||||||
|         signals.pre_save_post_validation.send(self.__class__, document=self, |         signals.pre_save_post_validation.send( | ||||||
|                                               created=created, **signal_kwargs) |             self.__class__, document=self, created=created, **signal_kwargs | ||||||
|  |         ) | ||||||
|         # it might be refreshed by the pre_save_post_validation hook, e.g., for etag generation |         # it might be refreshed by the pre_save_post_validation hook, e.g., for etag generation | ||||||
|         doc = self.to_mongo() |         doc = self.to_mongo() | ||||||
|  |  | ||||||
|         if self._meta.get('auto_create_index', True): |         if self._meta.get("auto_create_index", True): | ||||||
|             self.ensure_indexes() |             self.ensure_indexes() | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
| @@ -382,44 +407,45 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|             if created: |             if created: | ||||||
|                 object_id = self._save_create(doc, force_insert, write_concern) |                 object_id = self._save_create(doc, force_insert, write_concern) | ||||||
|             else: |             else: | ||||||
|                 object_id, created = self._save_update(doc, save_condition, |                 object_id, created = self._save_update( | ||||||
|                                                        write_concern) |                     doc, save_condition, write_concern | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|             if cascade is None: |             if cascade is None: | ||||||
|                 cascade = (self._meta.get('cascade', False) or |                 cascade = self._meta.get("cascade", False) or cascade_kwargs is not None | ||||||
|                            cascade_kwargs is not None) |  | ||||||
|  |  | ||||||
|             if cascade: |             if cascade: | ||||||
|                 kwargs = { |                 kwargs = { | ||||||
|                     'force_insert': force_insert, |                     "force_insert": force_insert, | ||||||
|                     'validate': validate, |                     "validate": validate, | ||||||
|                     'write_concern': write_concern, |                     "write_concern": write_concern, | ||||||
|                     'cascade': cascade |                     "cascade": cascade, | ||||||
|                 } |                 } | ||||||
|                 if cascade_kwargs:  # Allow granular control over cascades |                 if cascade_kwargs:  # Allow granular control over cascades | ||||||
|                     kwargs.update(cascade_kwargs) |                     kwargs.update(cascade_kwargs) | ||||||
|                 kwargs['_refs'] = _refs |                 kwargs["_refs"] = _refs | ||||||
|                 self.cascade_save(**kwargs) |                 self.cascade_save(**kwargs) | ||||||
|  |  | ||||||
|         except pymongo.errors.DuplicateKeyError as err: |         except pymongo.errors.DuplicateKeyError as err: | ||||||
|             message = u'Tried to save duplicate unique keys (%s)' |             message = "Tried to save duplicate unique keys (%s)" | ||||||
|             raise NotUniqueError(message % six.text_type(err)) |             raise NotUniqueError(message % err) | ||||||
|         except pymongo.errors.OperationFailure as err: |         except pymongo.errors.OperationFailure as err: | ||||||
|             message = 'Could not save document (%s)' |             message = "Could not save document (%s)" | ||||||
|             if re.match('^E1100[01] duplicate key', six.text_type(err)): |             if re.match("^E1100[01] duplicate key", str(err)): | ||||||
|                 # E11000 - duplicate key error index |                 # E11000 - duplicate key error index | ||||||
|                 # E11001 - duplicate key on update |                 # E11001 - duplicate key on update | ||||||
|                 message = u'Tried to save duplicate unique keys (%s)' |                 message = "Tried to save duplicate unique keys (%s)" | ||||||
|                 raise NotUniqueError(message % six.text_type(err)) |                 raise NotUniqueError(message % err) | ||||||
|             raise OperationError(message % six.text_type(err)) |             raise OperationError(message % err) | ||||||
|  |  | ||||||
|         # Make sure we store the PK on this document now that it's saved |         # Make sure we store the PK on this document now that it's saved | ||||||
|         id_field = self._meta['id_field'] |         id_field = self._meta["id_field"] | ||||||
|         if created or id_field not in self._meta.get('shard_key', []): |         if created or id_field not in self._meta.get("shard_key", []): | ||||||
|             self[id_field] = self._fields[id_field].to_python(object_id) |             self[id_field] = self._fields[id_field].to_python(object_id) | ||||||
|  |  | ||||||
|         signals.post_save.send(self.__class__, document=self, |         signals.post_save.send( | ||||||
|                                created=created, **signal_kwargs) |             self.__class__, document=self, created=created, **signal_kwargs | ||||||
|  |         ) | ||||||
|  |  | ||||||
|         self._clear_changed_fields() |         self._clear_changed_fields() | ||||||
|         self._created = False |         self._created = False | ||||||
| @@ -437,11 +463,12 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|                 return wc_collection.insert_one(doc).inserted_id |                 return wc_collection.insert_one(doc).inserted_id | ||||||
|             # insert_one will provoke UniqueError alongside save does not |             # insert_one will provoke UniqueError alongside save does not | ||||||
|             # therefore, it need to catch and call replace_one. |             # therefore, it need to catch and call replace_one. | ||||||
|             if '_id' in doc: |             if "_id" in doc: | ||||||
|                 raw_object = wc_collection.find_one_and_replace( |                 raw_object = wc_collection.find_one_and_replace( | ||||||
|                     {'_id': doc['_id']}, doc) |                     {"_id": doc["_id"]}, doc | ||||||
|  |                 ) | ||||||
|                 if raw_object: |                 if raw_object: | ||||||
|                     return doc['_id'] |                     return doc["_id"] | ||||||
|  |  | ||||||
|             object_id = wc_collection.insert_one(doc).inserted_id |             object_id = wc_collection.insert_one(doc).inserted_id | ||||||
|  |  | ||||||
| @@ -456,9 +483,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|  |  | ||||||
|         update_doc = {} |         update_doc = {} | ||||||
|         if updates: |         if updates: | ||||||
|             update_doc['$set'] = updates |             update_doc["$set"] = updates | ||||||
|         if removals: |         if removals: | ||||||
|             update_doc['$unset'] = removals |             update_doc["$unset"] = removals | ||||||
|  |  | ||||||
|         return update_doc |         return update_doc | ||||||
|  |  | ||||||
| @@ -468,39 +495,38 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         Helper method, should only be used inside save(). |         Helper method, should only be used inside save(). | ||||||
|         """ |         """ | ||||||
|         collection = self._get_collection() |         collection = self._get_collection() | ||||||
|         object_id = doc['_id'] |         object_id = doc["_id"] | ||||||
|         created = False |         created = False | ||||||
|  |  | ||||||
|         select_dict = {} |         select_dict = {} | ||||||
|         if save_condition is not None: |         if save_condition is not None: | ||||||
|             select_dict = transform.query(self.__class__, **save_condition) |             select_dict = transform.query(self.__class__, **save_condition) | ||||||
|  |  | ||||||
|         select_dict['_id'] = object_id |         select_dict["_id"] = object_id | ||||||
|  |  | ||||||
|         # Need to add shard key to query, or you get an error |         # Need to add shard key to query, or you get an error | ||||||
|         shard_key = self._meta.get('shard_key', tuple()) |         shard_key = self._meta.get("shard_key", tuple()) | ||||||
|         for k in shard_key: |         for k in shard_key: | ||||||
|             path = self._lookup_field(k.split('.')) |             path = self._lookup_field(k.split(".")) | ||||||
|             actual_key = [p.db_field for p in path] |             actual_key = [p.db_field for p in path] | ||||||
|             val = doc |             val = doc | ||||||
|             for ak in actual_key: |             for ak in actual_key: | ||||||
|                 val = val[ak] |                 val = val[ak] | ||||||
|             select_dict['.'.join(actual_key)] = val |             select_dict[".".join(actual_key)] = val | ||||||
|  |  | ||||||
|         update_doc = self._get_update_doc() |         update_doc = self._get_update_doc() | ||||||
|         if update_doc: |         if update_doc: | ||||||
|             upsert = save_condition is None |             upsert = save_condition is None | ||||||
|             with set_write_concern(collection, write_concern) as wc_collection: |             with set_write_concern(collection, write_concern) as wc_collection: | ||||||
|                 last_error = wc_collection.update_one( |                 last_error = wc_collection.update_one( | ||||||
|                     select_dict, |                     select_dict, update_doc, upsert=upsert | ||||||
|                     update_doc, |  | ||||||
|                     upsert=upsert |  | ||||||
|                 ).raw_result |                 ).raw_result | ||||||
|             if not upsert and last_error['n'] == 0: |             if not upsert and last_error["n"] == 0: | ||||||
|                 raise SaveConditionError('Race condition preventing' |                 raise SaveConditionError( | ||||||
|                                          ' document update detected') |                     "Race condition preventing document update detected" | ||||||
|  |                 ) | ||||||
|             if last_error is not None: |             if last_error is not None: | ||||||
|                 updated_existing = last_error.get('updatedExisting') |                 updated_existing = last_error.get("updatedExisting") | ||||||
|                 if updated_existing is False: |                 if updated_existing is False: | ||||||
|                     created = True |                     created = True | ||||||
|                     # !!! This is bad, means we accidentally created a new, |                     # !!! This is bad, means we accidentally created a new, | ||||||
| @@ -513,24 +539,23 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         """Recursively save any references and generic references on the |         """Recursively save any references and generic references on the | ||||||
|         document. |         document. | ||||||
|         """ |         """ | ||||||
|         _refs = kwargs.get('_refs') or [] |         _refs = kwargs.get("_refs") or [] | ||||||
|  |  | ||||||
|         ReferenceField = _import_class('ReferenceField') |         ReferenceField = _import_class("ReferenceField") | ||||||
|         GenericReferenceField = _import_class('GenericReferenceField') |         GenericReferenceField = _import_class("GenericReferenceField") | ||||||
|  |  | ||||||
|         for name, cls in self._fields.items(): |         for name, cls in self._fields.items(): | ||||||
|             if not isinstance(cls, (ReferenceField, |             if not isinstance(cls, (ReferenceField, GenericReferenceField)): | ||||||
|                                     GenericReferenceField)): |  | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             ref = self._data.get(name) |             ref = self._data.get(name) | ||||||
|             if not ref or isinstance(ref, DBRef): |             if not ref or isinstance(ref, DBRef): | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             if not getattr(ref, '_changed_fields', True): |             if not getattr(ref, "_changed_fields", True): | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) |             ref_id = "{},{}".format(ref.__class__.__name__, str(ref._data)) | ||||||
|             if ref and ref_id not in _refs: |             if ref and ref_id not in _refs: | ||||||
|                 _refs.append(ref_id) |                 _refs.append(ref_id) | ||||||
|                 kwargs["_refs"] = _refs |                 kwargs["_refs"] = _refs | ||||||
| @@ -539,27 +564,31 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def _qs(self): |     def _qs(self): | ||||||
|         """Return the queryset to use for updating / reloading / deletions.""" |         """Return the default queryset corresponding to this document.""" | ||||||
|         if not hasattr(self, '__objects'): |         if not hasattr(self, "__objects"): | ||||||
|             self.__objects = QuerySet(self, self._get_collection()) |             self.__objects = QuerySet(self, self._get_collection()) | ||||||
|         return self.__objects |         return self.__objects | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def _object_key(self): |     def _object_key(self): | ||||||
|         """Get the query dict that can be used to fetch this object from |         """Return a query dict that can be used to fetch this document. | ||||||
|         the database. Most of the time it's a simple PK lookup, but in |  | ||||||
|         case of a sharded collection with a compound shard key, it can |         Most of the time the dict is a simple PK lookup, but in case of | ||||||
|         contain a more complex query. |         a sharded collection with a compound shard key, it can contain a more | ||||||
|  |         complex query. | ||||||
|  |  | ||||||
|  |         Note that the dict returned by this method uses MongoEngine field | ||||||
|  |         names instead of PyMongo field names (e.g. "pk" instead of "_id", | ||||||
|  |         "some__nested__field" instead of "some.nested.field", etc.). | ||||||
|         """ |         """ | ||||||
|         select_dict = {'pk': self.pk} |         select_dict = {"pk": self.pk} | ||||||
|         shard_key = self.__class__._meta.get('shard_key', tuple()) |         shard_key = self.__class__._meta.get("shard_key", tuple()) | ||||||
|         for k in shard_key: |         for k in shard_key: | ||||||
|             path = self._lookup_field(k.split('.')) |  | ||||||
|             actual_key = [p.db_field for p in path] |  | ||||||
|             val = self |             val = self | ||||||
|             for ak in actual_key: |             field_parts = k.split(".") | ||||||
|                 val = getattr(val, ak) |             for part in field_parts: | ||||||
|             select_dict['__'.join(actual_key)] = val |                 val = getattr(val, part) | ||||||
|  |             select_dict["__".join(field_parts)] = val | ||||||
|         return select_dict |         return select_dict | ||||||
|  |  | ||||||
|     def update(self, **kwargs): |     def update(self, **kwargs): | ||||||
| @@ -570,14 +599,13 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         been saved. |         been saved. | ||||||
|         """ |         """ | ||||||
|         if self.pk is None: |         if self.pk is None: | ||||||
|             if kwargs.get('upsert', False): |             if kwargs.get("upsert", False): | ||||||
|                 query = self.to_mongo() |                 query = self.to_mongo() | ||||||
|                 if '_cls' in query: |                 if "_cls" in query: | ||||||
|                     del query['_cls'] |                     del query["_cls"] | ||||||
|                 return self._qs.filter(**query).update_one(**kwargs) |                 return self._qs.filter(**query).update_one(**kwargs) | ||||||
|             else: |             else: | ||||||
|                 raise OperationError( |                 raise OperationError("attempt to update a document not yet saved") | ||||||
|                     'attempt to update a document not yet saved') |  | ||||||
|  |  | ||||||
|         # Need to add shard key to query, or you get an error |         # Need to add shard key to query, or you get an error | ||||||
|         return self._qs.filter(**self._object_key).update_one(**kwargs) |         return self._qs.filter(**self._object_key).update_one(**kwargs) | ||||||
| @@ -601,16 +629,17 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         signals.pre_delete.send(self.__class__, document=self, **signal_kwargs) |         signals.pre_delete.send(self.__class__, document=self, **signal_kwargs) | ||||||
|  |  | ||||||
|         # Delete FileFields separately |         # Delete FileFields separately | ||||||
|         FileField = _import_class('FileField') |         FileField = _import_class("FileField") | ||||||
|         for name, field in iteritems(self._fields): |         for name, field in self._fields.items(): | ||||||
|             if isinstance(field, FileField): |             if isinstance(field, FileField): | ||||||
|                 getattr(self, name).delete() |                 getattr(self, name).delete() | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             self._qs.filter( |             self._qs.filter(**self._object_key).delete( | ||||||
|                 **self._object_key).delete(write_concern=write_concern, _from_doc_delete=True) |                 write_concern=write_concern, _from_doc_delete=True | ||||||
|  |             ) | ||||||
|         except pymongo.errors.OperationFailure as err: |         except pymongo.errors.OperationFailure as err: | ||||||
|             message = u'Could not delete document (%s)' % err.message |             message = "Could not delete document (%s)" % err.message | ||||||
|             raise OperationError(message) |             raise OperationError(message) | ||||||
|         signals.post_delete.send(self.__class__, document=self, **signal_kwargs) |         signals.post_delete.send(self.__class__, document=self, **signal_kwargs) | ||||||
|  |  | ||||||
| @@ -679,7 +708,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|  |  | ||||||
|         .. versionadded:: 0.5 |         .. versionadded:: 0.5 | ||||||
|         """ |         """ | ||||||
|         DeReference = _import_class('DeReference') |         DeReference = _import_class("DeReference") | ||||||
|         DeReference()([self], max_depth + 1) |         DeReference()([self], max_depth + 1) | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
| @@ -697,20 +726,24 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         if fields and isinstance(fields[0], int): |         if fields and isinstance(fields[0], int): | ||||||
|             max_depth = fields[0] |             max_depth = fields[0] | ||||||
|             fields = fields[1:] |             fields = fields[1:] | ||||||
|         elif 'max_depth' in kwargs: |         elif "max_depth" in kwargs: | ||||||
|             max_depth = kwargs['max_depth'] |             max_depth = kwargs["max_depth"] | ||||||
|  |  | ||||||
|         if self.pk is None: |         if self.pk is None: | ||||||
|             raise self.DoesNotExist('Document does not exist') |             raise self.DoesNotExist("Document does not exist") | ||||||
|  |  | ||||||
|         obj = self._qs.read_preference(ReadPreference.PRIMARY).filter( |         obj = ( | ||||||
|             **self._object_key).only(*fields).limit( |             self._qs.read_preference(ReadPreference.PRIMARY) | ||||||
|             1).select_related(max_depth=max_depth) |             .filter(**self._object_key) | ||||||
|  |             .only(*fields) | ||||||
|  |             .limit(1) | ||||||
|  |             .select_related(max_depth=max_depth) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|         if obj: |         if obj: | ||||||
|             obj = obj[0] |             obj = obj[0] | ||||||
|         else: |         else: | ||||||
|             raise self.DoesNotExist('Document does not exist') |             raise self.DoesNotExist("Document does not exist") | ||||||
|         for field in obj._data: |         for field in obj._data: | ||||||
|             if not fields or field in fields: |             if not fields or field in fields: | ||||||
|                 try: |                 try: | ||||||
| @@ -726,9 +759,11 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|                         # i.e. obj.update(unset__field=1) followed by obj.reload() |                         # i.e. obj.update(unset__field=1) followed by obj.reload() | ||||||
|                         delattr(self, field) |                         delattr(self, field) | ||||||
|  |  | ||||||
|         self._changed_fields = list( |         self._changed_fields = ( | ||||||
|             set(self._changed_fields) - set(fields) |             list(set(self._changed_fields) - set(fields)) | ||||||
|         ) if fields else obj._changed_fields |             if fields | ||||||
|  |             else obj._changed_fields | ||||||
|  |         ) | ||||||
|         self._created = False |         self._created = False | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
| @@ -754,7 +789,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         """Returns an instance of :class:`~bson.dbref.DBRef` useful in |         """Returns an instance of :class:`~bson.dbref.DBRef` useful in | ||||||
|         `__raw__` queries.""" |         `__raw__` queries.""" | ||||||
|         if self.pk is None: |         if self.pk is None: | ||||||
|             msg = 'Only saved documents can have a valid dbref' |             msg = "Only saved documents can have a valid dbref" | ||||||
|             raise OperationError(msg) |             raise OperationError(msg) | ||||||
|         return DBRef(self.__class__._get_collection_name(), self.pk) |         return DBRef(self.__class__._get_collection_name(), self.pk) | ||||||
|  |  | ||||||
| @@ -763,18 +798,22 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         """This method registers the delete rules to apply when removing this |         """This method registers the delete rules to apply when removing this | ||||||
|         object. |         object. | ||||||
|         """ |         """ | ||||||
|         classes = [get_document(class_name) |         classes = [ | ||||||
|                    for class_name in cls._subclasses |             get_document(class_name) | ||||||
|                    if class_name != cls.__name__] + [cls] |             for class_name in cls._subclasses | ||||||
|         documents = [get_document(class_name) |             if class_name != cls.__name__ | ||||||
|                      for class_name in document_cls._subclasses |         ] + [cls] | ||||||
|                      if class_name != document_cls.__name__] + [document_cls] |         documents = [ | ||||||
|  |             get_document(class_name) | ||||||
|  |             for class_name in document_cls._subclasses | ||||||
|  |             if class_name != document_cls.__name__ | ||||||
|  |         ] + [document_cls] | ||||||
|  |  | ||||||
|         for klass in classes: |         for klass in classes: | ||||||
|             for document_cls in documents: |             for document_cls in documents: | ||||||
|                 delete_rules = klass._meta.get('delete_rules') or {} |                 delete_rules = klass._meta.get("delete_rules") or {} | ||||||
|                 delete_rules[(document_cls, field_name)] = rule |                 delete_rules[(document_cls, field_name)] = rule | ||||||
|                 klass._meta['delete_rules'] = delete_rules |                 klass._meta["delete_rules"] = delete_rules | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def drop_collection(cls): |     def drop_collection(cls): | ||||||
| @@ -789,8 +828,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         """ |         """ | ||||||
|         coll_name = cls._get_collection_name() |         coll_name = cls._get_collection_name() | ||||||
|         if not coll_name: |         if not coll_name: | ||||||
|             raise OperationError('Document %s has no collection defined ' |             raise OperationError( | ||||||
|                                  '(is it abstract ?)' % cls) |                 "Document %s has no collection defined (is it abstract ?)" % cls | ||||||
|  |             ) | ||||||
|         cls._collection = None |         cls._collection = None | ||||||
|         db = cls._get_db() |         db = cls._get_db() | ||||||
|         db.drop_collection(coll_name) |         db.drop_collection(coll_name) | ||||||
| @@ -806,19 +846,14 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         """ |         """ | ||||||
|         index_spec = cls._build_index_spec(keys) |         index_spec = cls._build_index_spec(keys) | ||||||
|         index_spec = index_spec.copy() |         index_spec = index_spec.copy() | ||||||
|         fields = index_spec.pop('fields') |         fields = index_spec.pop("fields") | ||||||
|         drop_dups = kwargs.get('drop_dups', False) |         index_spec["background"] = background | ||||||
|         if drop_dups: |  | ||||||
|             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' |  | ||||||
|             warnings.warn(msg, DeprecationWarning) |  | ||||||
|         index_spec['background'] = background |  | ||||||
|         index_spec.update(kwargs) |         index_spec.update(kwargs) | ||||||
|  |  | ||||||
|         return cls._get_collection().create_index(fields, **index_spec) |         return cls._get_collection().create_index(fields, **index_spec) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def ensure_index(cls, key_or_list, drop_dups=False, background=False, |     def ensure_index(cls, key_or_list, background=False, **kwargs): | ||||||
|                      **kwargs): |  | ||||||
|         """Ensure that the given indexes are in place. Deprecated in favour |         """Ensure that the given indexes are in place. Deprecated in favour | ||||||
|         of create_index. |         of create_index. | ||||||
|  |  | ||||||
| @@ -826,12 +861,7 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|             construct a multi-field index); keys may be prefixed with a **+** |             construct a multi-field index); keys may be prefixed with a **+** | ||||||
|             or a **-** to determine the index ordering |             or a **-** to determine the index ordering | ||||||
|         :param background: Allows index creation in the background |         :param background: Allows index creation in the background | ||||||
|         :param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value |  | ||||||
|             will be removed if PyMongo3+ is used |  | ||||||
|         """ |         """ | ||||||
|         if drop_dups: |  | ||||||
|             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' |  | ||||||
|             warnings.warn(msg, DeprecationWarning) |  | ||||||
|         return cls.create_index(key_or_list, background=background, **kwargs) |         return cls.create_index(key_or_list, background=background, **kwargs) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
| @@ -843,13 +873,9 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         .. note:: You can disable automatic index creation by setting |         .. note:: You can disable automatic index creation by setting | ||||||
|                   `auto_create_index` to False in the documents meta data |                   `auto_create_index` to False in the documents meta data | ||||||
|         """ |         """ | ||||||
|         background = cls._meta.get('index_background', False) |         background = cls._meta.get("index_background", False) | ||||||
|         drop_dups = cls._meta.get('index_drop_dups', False) |         index_opts = cls._meta.get("index_opts") or {} | ||||||
|         index_opts = cls._meta.get('index_opts') or {} |         index_cls = cls._meta.get("index_cls", True) | ||||||
|         index_cls = cls._meta.get('index_cls', True) |  | ||||||
|         if drop_dups: |  | ||||||
|             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' |  | ||||||
|             warnings.warn(msg, DeprecationWarning) |  | ||||||
|  |  | ||||||
|         collection = cls._get_collection() |         collection = cls._get_collection() | ||||||
|         # 746: when connection is via mongos, the read preference is not necessarily an indication that |         # 746: when connection is via mongos, the read preference is not necessarily an indication that | ||||||
| @@ -864,40 +890,39 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         cls_indexed = False |         cls_indexed = False | ||||||
|  |  | ||||||
|         # Ensure document-defined indexes are created |         # Ensure document-defined indexes are created | ||||||
|         if cls._meta['index_specs']: |         if cls._meta["index_specs"]: | ||||||
|             index_spec = cls._meta['index_specs'] |             index_spec = cls._meta["index_specs"] | ||||||
|             for spec in index_spec: |             for spec in index_spec: | ||||||
|                 spec = spec.copy() |                 spec = spec.copy() | ||||||
|                 fields = spec.pop('fields') |                 fields = spec.pop("fields") | ||||||
|                 cls_indexed = cls_indexed or includes_cls(fields) |                 cls_indexed = cls_indexed or includes_cls(fields) | ||||||
|                 opts = index_opts.copy() |                 opts = index_opts.copy() | ||||||
|                 opts.update(spec) |                 opts.update(spec) | ||||||
|  |  | ||||||
|                 # we shouldn't pass 'cls' to the collection.ensureIndex options |                 # we shouldn't pass 'cls' to the collection.ensureIndex options | ||||||
|                 # because of https://jira.mongodb.org/browse/SERVER-769 |                 # because of https://jira.mongodb.org/browse/SERVER-769 | ||||||
|                 if 'cls' in opts: |                 if "cls" in opts: | ||||||
|                     del opts['cls'] |                     del opts["cls"] | ||||||
|  |  | ||||||
|                 collection.create_index(fields, background=background, **opts) |                 collection.create_index(fields, background=background, **opts) | ||||||
|  |  | ||||||
|         # If _cls is being used (for polymorphism), it needs an index, |         # If _cls is being used (for polymorphism), it needs an index, | ||||||
|         # only if another index doesn't begin with _cls |         # only if another index doesn't begin with _cls | ||||||
|         if index_cls and not cls_indexed and cls._meta.get('allow_inheritance'): |         if index_cls and not cls_indexed and cls._meta.get("allow_inheritance"): | ||||||
|  |  | ||||||
|             # we shouldn't pass 'cls' to the collection.ensureIndex options |             # we shouldn't pass 'cls' to the collection.ensureIndex options | ||||||
|             # because of https://jira.mongodb.org/browse/SERVER-769 |             # because of https://jira.mongodb.org/browse/SERVER-769 | ||||||
|             if 'cls' in index_opts: |             if "cls" in index_opts: | ||||||
|                 del index_opts['cls'] |                 del index_opts["cls"] | ||||||
|  |  | ||||||
|             collection.create_index('_cls', background=background, |             collection.create_index("_cls", background=background, **index_opts) | ||||||
|                                     **index_opts) |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def list_indexes(cls): |     def list_indexes(cls): | ||||||
|         """ Lists all of the indexes that should be created for given |         """ Lists all of the indexes that should be created for given | ||||||
|         collection. It includes all the indexes from super- and sub-classes. |         collection. It includes all the indexes from super- and sub-classes. | ||||||
|         """ |         """ | ||||||
|         if cls._meta.get('abstract'): |         if cls._meta.get("abstract"): | ||||||
|             return [] |             return [] | ||||||
|  |  | ||||||
|         # get all the base classes, subclasses and siblings |         # get all the base classes, subclasses and siblings | ||||||
| @@ -905,22 +930,27 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|  |  | ||||||
|         def get_classes(cls): |         def get_classes(cls): | ||||||
|  |  | ||||||
|             if (cls not in classes and |             if cls not in classes and isinstance(cls, TopLevelDocumentMetaclass): | ||||||
|                     isinstance(cls, TopLevelDocumentMetaclass)): |  | ||||||
|                 classes.append(cls) |                 classes.append(cls) | ||||||
|  |  | ||||||
|             for base_cls in cls.__bases__: |             for base_cls in cls.__bases__: | ||||||
|                 if (isinstance(base_cls, TopLevelDocumentMetaclass) and |                 if ( | ||||||
|                         base_cls != Document and |                     isinstance(base_cls, TopLevelDocumentMetaclass) | ||||||
|                         not base_cls._meta.get('abstract') and |                     and base_cls != Document | ||||||
|                         base_cls._get_collection().full_name == cls._get_collection().full_name and |                     and not base_cls._meta.get("abstract") | ||||||
|                         base_cls not in classes): |                     and base_cls._get_collection().full_name | ||||||
|  |                     == cls._get_collection().full_name | ||||||
|  |                     and base_cls not in classes | ||||||
|  |                 ): | ||||||
|                     classes.append(base_cls) |                     classes.append(base_cls) | ||||||
|                     get_classes(base_cls) |                     get_classes(base_cls) | ||||||
|             for subclass in cls.__subclasses__(): |             for subclass in cls.__subclasses__(): | ||||||
|                 if (isinstance(base_cls, TopLevelDocumentMetaclass) and |                 if ( | ||||||
|                         subclass._get_collection().full_name == cls._get_collection().full_name and |                     isinstance(base_cls, TopLevelDocumentMetaclass) | ||||||
|                         subclass not in classes): |                     and subclass._get_collection().full_name | ||||||
|  |                     == cls._get_collection().full_name | ||||||
|  |                     and subclass not in classes | ||||||
|  |                 ): | ||||||
|                     classes.append(subclass) |                     classes.append(subclass) | ||||||
|                     get_classes(subclass) |                     get_classes(subclass) | ||||||
|  |  | ||||||
| @@ -930,11 +960,11 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|         def get_indexes_spec(cls): |         def get_indexes_spec(cls): | ||||||
|             indexes = [] |             indexes = [] | ||||||
|  |  | ||||||
|             if cls._meta['index_specs']: |             if cls._meta["index_specs"]: | ||||||
|                 index_spec = cls._meta['index_specs'] |                 index_spec = cls._meta["index_specs"] | ||||||
|                 for spec in index_spec: |                 for spec in index_spec: | ||||||
|                     spec = spec.copy() |                     spec = spec.copy() | ||||||
|                     fields = spec.pop('fields') |                     fields = spec.pop("fields") | ||||||
|                     indexes.append(fields) |                     indexes.append(fields) | ||||||
|             return indexes |             return indexes | ||||||
|  |  | ||||||
| @@ -945,10 +975,10 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|                     indexes.append(index) |                     indexes.append(index) | ||||||
|  |  | ||||||
|         # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed |         # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed | ||||||
|         if [(u'_id', 1)] not in indexes: |         if [("_id", 1)] not in indexes: | ||||||
|             indexes.append([(u'_id', 1)]) |             indexes.append([("_id", 1)]) | ||||||
|         if cls._meta.get('index_cls', True) and cls._meta.get('allow_inheritance'): |         if cls._meta.get("index_cls", True) and cls._meta.get("allow_inheritance"): | ||||||
|             indexes.append([(u'_cls', 1)]) |             indexes.append([("_cls", 1)]) | ||||||
|  |  | ||||||
|         return indexes |         return indexes | ||||||
|  |  | ||||||
| @@ -962,30 +992,29 @@ class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | |||||||
|  |  | ||||||
|         existing = [] |         existing = [] | ||||||
|         for info in cls._get_collection().index_information().values(): |         for info in cls._get_collection().index_information().values(): | ||||||
|             if '_fts' in info['key'][0]: |             if "_fts" in info["key"][0]: | ||||||
|                 index_type = info['key'][0][1] |                 index_type = info["key"][0][1] | ||||||
|                 text_index_fields = info.get('weights').keys() |                 text_index_fields = info.get("weights").keys() | ||||||
|                 existing.append( |                 existing.append([(key, index_type) for key in text_index_fields]) | ||||||
|                     [(key, index_type) for key in text_index_fields]) |  | ||||||
|             else: |             else: | ||||||
|                 existing.append(info['key']) |                 existing.append(info["key"]) | ||||||
|         missing = [index for index in required if index not in existing] |         missing = [index for index in required if index not in existing] | ||||||
|         extra = [index for index in existing if index not in required] |         extra = [index for index in existing if index not in required] | ||||||
|  |  | ||||||
|         # if { _cls: 1 } is missing, make sure it's *really* necessary |         # if { _cls: 1 } is missing, make sure it's *really* necessary | ||||||
|         if [(u'_cls', 1)] in missing: |         if [("_cls", 1)] in missing: | ||||||
|             cls_obsolete = False |             cls_obsolete = False | ||||||
|             for index in existing: |             for index in existing: | ||||||
|                 if includes_cls(index) and index not in extra: |                 if includes_cls(index) and index not in extra: | ||||||
|                     cls_obsolete = True |                     cls_obsolete = True | ||||||
|                     break |                     break | ||||||
|             if cls_obsolete: |             if cls_obsolete: | ||||||
|                 missing.remove([(u'_cls', 1)]) |                 missing.remove([("_cls", 1)]) | ||||||
|  |  | ||||||
|         return {'missing': missing, 'extra': extra} |         return {"missing": missing, "extra": extra} | ||||||
|  |  | ||||||
|  |  | ||||||
| class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): | class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass): | ||||||
|     """A Dynamic Document class allowing flexible, expandable and uncontrolled |     """A Dynamic Document class allowing flexible, expandable and uncontrolled | ||||||
|     schemas.  As a :class:`~mongoengine.Document` subclass, acts in the same |     schemas.  As a :class:`~mongoengine.Document` subclass, acts in the same | ||||||
|     way as an ordinary document but has expanded style properties.  Any data |     way as an ordinary document but has expanded style properties.  Any data | ||||||
| @@ -999,7 +1028,6 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): | |||||||
|         There is one caveat on Dynamic Documents: undeclared fields cannot start with `_` |         There is one caveat on Dynamic Documents: undeclared fields cannot start with `_` | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 |  | ||||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|     my_metaclass = TopLevelDocumentMetaclass |     my_metaclass = TopLevelDocumentMetaclass | ||||||
|  |  | ||||||
| @@ -1014,16 +1042,15 @@ class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): | |||||||
|             setattr(self, field_name, None) |             setattr(self, field_name, None) | ||||||
|             self._dynamic_fields[field_name].null = False |             self._dynamic_fields[field_name].null = False | ||||||
|         else: |         else: | ||||||
|             super(DynamicDocument, self).__delattr__(*args, **kwargs) |             super().__delattr__(*args, **kwargs) | ||||||
|  |  | ||||||
|  |  | ||||||
| class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)): | class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass): | ||||||
|     """A Dynamic Embedded Document class allowing flexible, expandable and |     """A Dynamic Embedded Document class allowing flexible, expandable and | ||||||
|     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more |     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more | ||||||
|     information about dynamic documents. |     information about dynamic documents. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 |  | ||||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|     my_metaclass = DocumentMetaclass |     my_metaclass = DocumentMetaclass | ||||||
|  |  | ||||||
| @@ -1043,7 +1070,7 @@ class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocu | |||||||
|             setattr(self, field_name, None) |             setattr(self, field_name, None) | ||||||
|  |  | ||||||
|  |  | ||||||
| class MapReduceDocument(object): | class MapReduceDocument: | ||||||
|     """A document returned from a map/reduce query. |     """A document returned from a map/reduce query. | ||||||
|  |  | ||||||
|     :param collection: An instance of :class:`~pymongo.Collection` |     :param collection: An instance of :class:`~pymongo.Collection` | ||||||
| @@ -1067,17 +1094,16 @@ class MapReduceDocument(object): | |||||||
|         """Lazy-load the object referenced by ``self.key``. ``self.key`` |         """Lazy-load the object referenced by ``self.key``. ``self.key`` | ||||||
|         should be the ``primary_key``. |         should be the ``primary_key``. | ||||||
|         """ |         """ | ||||||
|         id_field = self._document()._meta['id_field'] |         id_field = self._document()._meta["id_field"] | ||||||
|         id_field_type = type(id_field) |         id_field_type = type(id_field) | ||||||
|  |  | ||||||
|         if not isinstance(self.key, id_field_type): |         if not isinstance(self.key, id_field_type): | ||||||
|             try: |             try: | ||||||
|                 self.key = id_field_type(self.key) |                 self.key = id_field_type(self.key) | ||||||
|             except Exception: |             except Exception: | ||||||
|                 raise Exception('Could not cast key as %s' % |                 raise Exception("Could not cast key as %s" % id_field_type.__name__) | ||||||
|                                 id_field_type.__name__) |  | ||||||
|  |  | ||||||
|         if not hasattr(self, '_key_object'): |         if not hasattr(self, "_key_object"): | ||||||
|             self._key_object = self._document.objects.with_id(self.key) |             self._key_object = self._document.objects.with_id(self.key) | ||||||
|             return self._key_object |             return self._key_object | ||||||
|         return self._key_object |         return self._key_object | ||||||
|   | |||||||
| @@ -1,12 +1,21 @@ | |||||||
| from collections import defaultdict | from collections import defaultdict | ||||||
|  |  | ||||||
| import six |  | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', | __all__ = ( | ||||||
|            'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', |     "NotRegistered", | ||||||
|            'OperationError', 'NotUniqueError', 'FieldDoesNotExist', |     "InvalidDocumentError", | ||||||
|            'ValidationError', 'SaveConditionError', 'DeprecatedError') |     "LookUpError", | ||||||
|  |     "DoesNotExist", | ||||||
|  |     "MultipleObjectsReturned", | ||||||
|  |     "InvalidQueryError", | ||||||
|  |     "OperationError", | ||||||
|  |     "NotUniqueError", | ||||||
|  |     "BulkWriteError", | ||||||
|  |     "FieldDoesNotExist", | ||||||
|  |     "ValidationError", | ||||||
|  |     "SaveConditionError", | ||||||
|  |     "DeprecatedError", | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class NotRegistered(Exception): | class NotRegistered(Exception): | ||||||
| @@ -41,6 +50,10 @@ class NotUniqueError(OperationError): | |||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class BulkWriteError(OperationError): | ||||||
|  |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| class SaveConditionError(OperationError): | class SaveConditionError(OperationError): | ||||||
|     pass |     pass | ||||||
|  |  | ||||||
| @@ -71,25 +84,25 @@ class ValidationError(AssertionError): | |||||||
|     field_name = None |     field_name = None | ||||||
|     _message = None |     _message = None | ||||||
|  |  | ||||||
|     def __init__(self, message='', **kwargs): |     def __init__(self, message="", **kwargs): | ||||||
|         super(ValidationError, self).__init__(message) |         super().__init__(message) | ||||||
|         self.errors = kwargs.get('errors', {}) |         self.errors = kwargs.get("errors", {}) | ||||||
|         self.field_name = kwargs.get('field_name') |         self.field_name = kwargs.get("field_name") | ||||||
|         self.message = message |         self.message = message | ||||||
|  |  | ||||||
|     def __str__(self): |     def __str__(self): | ||||||
|         return six.text_type(self.message) |         return str(self.message) | ||||||
|  |  | ||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
|         return '%s(%s,)' % (self.__class__.__name__, self.message) |         return "{}({},)".format(self.__class__.__name__, self.message) | ||||||
|  |  | ||||||
|     def __getattribute__(self, name): |     def __getattribute__(self, name): | ||||||
|         message = super(ValidationError, self).__getattribute__(name) |         message = super().__getattribute__(name) | ||||||
|         if name == 'message': |         if name == "message": | ||||||
|             if self.field_name: |             if self.field_name: | ||||||
|                 message = '%s' % message |                 message = "%s" % message | ||||||
|             if self.errors: |             if self.errors: | ||||||
|                 message = '%s(%s)' % (message, self._format_errors()) |                 message = "{}({})".format(message, self._format_errors()) | ||||||
|         return message |         return message | ||||||
|  |  | ||||||
|     def _get_message(self): |     def _get_message(self): | ||||||
| @@ -111,12 +124,12 @@ class ValidationError(AssertionError): | |||||||
|         def build_dict(source): |         def build_dict(source): | ||||||
|             errors_dict = {} |             errors_dict = {} | ||||||
|             if isinstance(source, dict): |             if isinstance(source, dict): | ||||||
|                 for field_name, error in iteritems(source): |                 for field_name, error in source.items(): | ||||||
|                     errors_dict[field_name] = build_dict(error) |                     errors_dict[field_name] = build_dict(error) | ||||||
|             elif isinstance(source, ValidationError) and source.errors: |             elif isinstance(source, ValidationError) and source.errors: | ||||||
|                 return build_dict(source.errors) |                 return build_dict(source.errors) | ||||||
|             else: |             else: | ||||||
|                 return six.text_type(source) |                 return str(source) | ||||||
|  |  | ||||||
|             return errors_dict |             return errors_dict | ||||||
|  |  | ||||||
| @@ -128,22 +141,22 @@ class ValidationError(AssertionError): | |||||||
|     def _format_errors(self): |     def _format_errors(self): | ||||||
|         """Returns a string listing all errors within a document""" |         """Returns a string listing all errors within a document""" | ||||||
|  |  | ||||||
|         def generate_key(value, prefix=''): |         def generate_key(value, prefix=""): | ||||||
|             if isinstance(value, list): |             if isinstance(value, list): | ||||||
|                 value = ' '.join([generate_key(k) for k in value]) |                 value = " ".join([generate_key(k) for k in value]) | ||||||
|             elif isinstance(value, dict): |             elif isinstance(value, dict): | ||||||
|                 value = ' '.join( |                 value = " ".join([generate_key(v, k) for k, v in value.items()]) | ||||||
|                     [generate_key(v, k) for k, v in iteritems(value)]) |  | ||||||
|  |  | ||||||
|             results = '%s.%s' % (prefix, value) if prefix else value |             results = "{}.{}".format(prefix, value) if prefix else value | ||||||
|             return results |             return results | ||||||
|  |  | ||||||
|         error_dict = defaultdict(list) |         error_dict = defaultdict(list) | ||||||
|         for k, v in iteritems(self.to_dict()): |         for k, v in self.to_dict().items(): | ||||||
|             error_dict[generate_key(v)].append(k) |             error_dict[generate_key(v)].append(k) | ||||||
|         return ' '.join(['%s: %s' % (k, v) for k, v in iteritems(error_dict)]) |         return " ".join(["{}: {}".format(k, v) for k, v in error_dict.items()]) | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeprecatedError(Exception): | class DeprecatedError(Exception): | ||||||
|     """Raise when a user uses a feature that has been Deprecated""" |     """Raise when a user uses a feature that has been Deprecated""" | ||||||
|  |  | ||||||
|     pass |     pass | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -11,9 +11,9 @@ MONGODB_36 = (3, 6) | |||||||
|  |  | ||||||
|  |  | ||||||
| def get_mongodb_version(): | def get_mongodb_version(): | ||||||
|     """Return the version of the connected mongoDB (first 2 digits) |     """Return the version of the default connected mongoDB (first 2 digits) | ||||||
|  |  | ||||||
|     :return: tuple(int, int) |     :return: tuple(int, int) | ||||||
|     """ |     """ | ||||||
|     version_list = get_connection().server_info()['versionArray'][:2]     # e.g: (3, 2) |     version_list = get_connection().server_info()["versionArray"][:2]  # e.g: (3, 2) | ||||||
|     return tuple(version_list) |     return tuple(version_list) | ||||||
|   | |||||||
| @@ -27,6 +27,6 @@ def list_collection_names(db, include_system_collections=False): | |||||||
|         collections = db.collection_names() |         collections = db.collection_names() | ||||||
|  |  | ||||||
|     if not include_system_collections: |     if not include_system_collections: | ||||||
|         collections = [c for c in collections if not c.startswith('system.')] |         collections = [c for c in collections if not c.startswith("system.")] | ||||||
|  |  | ||||||
|     return collections |     return collections | ||||||
|   | |||||||
| @@ -1,23 +0,0 @@ | |||||||
| """ |  | ||||||
| Helper functions, constants, and types to aid with Python v2.7 - v3.x support |  | ||||||
| """ |  | ||||||
| import six |  | ||||||
|  |  | ||||||
| # six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. |  | ||||||
| StringIO = six.BytesIO |  | ||||||
|  |  | ||||||
| # Additionally for Py2, try to use the faster cStringIO, if available |  | ||||||
| if not six.PY3: |  | ||||||
|     try: |  | ||||||
|         import cStringIO |  | ||||||
|     except ImportError: |  | ||||||
|         pass |  | ||||||
|     else: |  | ||||||
|         StringIO = cStringIO.StringIO |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if six.PY3: |  | ||||||
|     from collections.abc import Hashable |  | ||||||
| else: |  | ||||||
|     # raises DeprecationWarnings in Python >=3.7 |  | ||||||
|     from collections import Hashable |  | ||||||
| @@ -7,11 +7,22 @@ from mongoengine.queryset.visitor import * | |||||||
|  |  | ||||||
| # Expose just the public subset of all imported objects and constants. | # Expose just the public subset of all imported objects and constants. | ||||||
| __all__ = ( | __all__ = ( | ||||||
|     'QuerySet', 'QuerySetNoCache', 'Q', 'queryset_manager', 'QuerySetManager', |     "QuerySet", | ||||||
|     'QueryFieldList', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL', |     "QuerySetNoCache", | ||||||
|  |     "Q", | ||||||
|  |     "queryset_manager", | ||||||
|  |     "QuerySetManager", | ||||||
|  |     "QueryFieldList", | ||||||
|  |     "DO_NOTHING", | ||||||
|  |     "NULLIFY", | ||||||
|  |     "CASCADE", | ||||||
|  |     "DENY", | ||||||
|  |     "PULL", | ||||||
|     # Errors that might be related to a queryset, mostly here for backward |     # Errors that might be related to a queryset, mostly here for backward | ||||||
|     # compatibility |     # compatibility | ||||||
|     'DoesNotExist', 'InvalidQueryError', 'MultipleObjectsReturned', |     "DoesNotExist", | ||||||
|     'NotUniqueError', 'OperationError', |     "InvalidQueryError", | ||||||
|  |     "MultipleObjectsReturned", | ||||||
|  |     "NotUniqueError", | ||||||
|  |     "OperationError", | ||||||
| ) | ) | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,12 +1,15 @@ | |||||||
| __all__ = ('QueryFieldList',) | __all__ = ("QueryFieldList",) | ||||||
|  |  | ||||||
|  |  | ||||||
| class QueryFieldList(object): | class QueryFieldList: | ||||||
|     """Object that handles combinations of .only() and .exclude() calls""" |     """Object that handles combinations of .only() and .exclude() calls""" | ||||||
|  |  | ||||||
|     ONLY = 1 |     ONLY = 1 | ||||||
|     EXCLUDE = 0 |     EXCLUDE = 0 | ||||||
|  |  | ||||||
|     def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False): |     def __init__( | ||||||
|  |         self, fields=None, value=ONLY, always_include=None, _only_called=False | ||||||
|  |     ): | ||||||
|         """The QueryFieldList builder |         """The QueryFieldList builder | ||||||
|  |  | ||||||
|         :param fields: A list of fields used in `.only()` or `.exclude()` |         :param fields: A list of fields used in `.only()` or `.exclude()` | ||||||
| @@ -49,7 +52,7 @@ class QueryFieldList(object): | |||||||
|             self.fields = f.fields - self.fields |             self.fields = f.fields - self.fields | ||||||
|             self._clean_slice() |             self._clean_slice() | ||||||
|  |  | ||||||
|         if '_id' in f.fields: |         if "_id" in f.fields: | ||||||
|             self._id = f.value |             self._id = f.value | ||||||
|  |  | ||||||
|         if self.always_include: |         if self.always_include: | ||||||
| @@ -59,25 +62,23 @@ class QueryFieldList(object): | |||||||
|             else: |             else: | ||||||
|                 self.fields -= self.always_include |                 self.fields -= self.always_include | ||||||
|  |  | ||||||
|         if getattr(f, '_only_called', False): |         if getattr(f, "_only_called", False): | ||||||
|             self._only_called = True |             self._only_called = True | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def __bool__(self): |     def __bool__(self): | ||||||
|         return bool(self.fields) |         return bool(self.fields) | ||||||
|  |  | ||||||
|     __nonzero__ = __bool__  # For Py2 support |  | ||||||
|  |  | ||||||
|     def as_dict(self): |     def as_dict(self): | ||||||
|         field_list = {field: self.value for field in self.fields} |         field_list = {field: self.value for field in self.fields} | ||||||
|         if self.slice: |         if self.slice: | ||||||
|             field_list.update(self.slice) |             field_list.update(self.slice) | ||||||
|         if self._id is not None: |         if self._id is not None: | ||||||
|             field_list['_id'] = self._id |             field_list["_id"] = self._id | ||||||
|         return field_list |         return field_list | ||||||
|  |  | ||||||
|     def reset(self): |     def reset(self): | ||||||
|         self.fields = set([]) |         self.fields = set() | ||||||
|         self.slice = {} |         self.slice = {} | ||||||
|         self.value = self.ONLY |         self.value = self.ONLY | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,10 +1,10 @@ | |||||||
| from functools import partial | from functools import partial | ||||||
| from mongoengine.queryset.queryset import QuerySet | from mongoengine.queryset.queryset import QuerySet | ||||||
|  |  | ||||||
| __all__ = ('queryset_manager', 'QuerySetManager') | __all__ = ("queryset_manager", "QuerySetManager") | ||||||
|  |  | ||||||
|  |  | ||||||
| class QuerySetManager(object): | class QuerySetManager: | ||||||
|     """ |     """ | ||||||
|     The default QuerySet Manager. |     The default QuerySet Manager. | ||||||
|  |  | ||||||
| @@ -33,7 +33,7 @@ class QuerySetManager(object): | |||||||
|             return self |             return self | ||||||
|  |  | ||||||
|         # owner is the document that contains the QuerySetManager |         # owner is the document that contains the QuerySetManager | ||||||
|         queryset_class = owner._meta.get('queryset_class', self.default) |         queryset_class = owner._meta.get("queryset_class", self.default) | ||||||
|         queryset = queryset_class(owner, owner._get_collection()) |         queryset = queryset_class(owner, owner._get_collection()) | ||||||
|         if self.get_queryset: |         if self.get_queryset: | ||||||
|             arg_count = self.get_queryset.__code__.co_argcount |             arg_count = self.get_queryset.__code__.co_argcount | ||||||
|   | |||||||
| @@ -1,11 +1,22 @@ | |||||||
| import six |  | ||||||
|  |  | ||||||
| from mongoengine.errors import OperationError | from mongoengine.errors import OperationError | ||||||
| from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING, | from mongoengine.queryset.base import ( | ||||||
|                                        NULLIFY, PULL) |     BaseQuerySet, | ||||||
|  |     CASCADE, | ||||||
|  |     DENY, | ||||||
|  |     DO_NOTHING, | ||||||
|  |     NULLIFY, | ||||||
|  |     PULL, | ||||||
|  | ) | ||||||
|  |  | ||||||
| __all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE', | __all__ = ( | ||||||
|            'DENY', 'PULL') |     "QuerySet", | ||||||
|  |     "QuerySetNoCache", | ||||||
|  |     "DO_NOTHING", | ||||||
|  |     "NULLIFY", | ||||||
|  |     "CASCADE", | ||||||
|  |     "DENY", | ||||||
|  |     "PULL", | ||||||
|  | ) | ||||||
|  |  | ||||||
| # The maximum number of items to display in a QuerySet.__repr__ | # The maximum number of items to display in a QuerySet.__repr__ | ||||||
| REPR_OUTPUT_SIZE = 20 | REPR_OUTPUT_SIZE = 20 | ||||||
| @@ -57,12 +68,12 @@ class QuerySet(BaseQuerySet): | |||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
|         """Provide a string representation of the QuerySet""" |         """Provide a string representation of the QuerySet""" | ||||||
|         if self._iter: |         if self._iter: | ||||||
|             return '.. queryset mid-iteration ..' |             return ".. queryset mid-iteration .." | ||||||
|  |  | ||||||
|         self._populate_cache() |         self._populate_cache() | ||||||
|         data = self._result_cache[:REPR_OUTPUT_SIZE + 1] |         data = self._result_cache[: REPR_OUTPUT_SIZE + 1] | ||||||
|         if len(data) > REPR_OUTPUT_SIZE: |         if len(data) > REPR_OUTPUT_SIZE: | ||||||
|             data[-1] = '...(remaining elements truncated)...' |             data[-1] = "...(remaining elements truncated)..." | ||||||
|         return repr(data) |         return repr(data) | ||||||
|  |  | ||||||
|     def _iter_results(self): |     def _iter_results(self): | ||||||
| @@ -114,8 +125,8 @@ class QuerySet(BaseQuerySet): | |||||||
|         # Pull in ITER_CHUNK_SIZE docs from the database and store them in |         # Pull in ITER_CHUNK_SIZE docs from the database and store them in | ||||||
|         # the result cache. |         # the result cache. | ||||||
|         try: |         try: | ||||||
|             for _ in six.moves.range(ITER_CHUNK_SIZE): |             for _ in range(ITER_CHUNK_SIZE): | ||||||
|                 self._result_cache.append(six.next(self)) |                 self._result_cache.append(next(self)) | ||||||
|         except StopIteration: |         except StopIteration: | ||||||
|             # Getting this exception means there are no more docs in the |             # Getting this exception means there are no more docs in the | ||||||
|             # db cursor. Set _has_more to False so that we can use that |             # db cursor. Set _has_more to False so that we can use that | ||||||
| @@ -130,10 +141,10 @@ class QuerySet(BaseQuerySet): | |||||||
|             getting the count |             getting the count | ||||||
|         """ |         """ | ||||||
|         if with_limit_and_skip is False: |         if with_limit_and_skip is False: | ||||||
|             return super(QuerySet, self).count(with_limit_and_skip) |             return super().count(with_limit_and_skip) | ||||||
|  |  | ||||||
|         if self._len is None: |         if self._len is None: | ||||||
|             self._len = super(QuerySet, self).count(with_limit_and_skip) |             self._len = super().count(with_limit_and_skip) | ||||||
|  |  | ||||||
|         return self._len |         return self._len | ||||||
|  |  | ||||||
| @@ -143,10 +154,9 @@ class QuerySet(BaseQuerySet): | |||||||
|         .. versionadded:: 0.8.3 Convert to non caching queryset |         .. versionadded:: 0.8.3 Convert to non caching queryset | ||||||
|         """ |         """ | ||||||
|         if self._result_cache is not None: |         if self._result_cache is not None: | ||||||
|             raise OperationError('QuerySet already cached') |             raise OperationError("QuerySet already cached") | ||||||
|  |  | ||||||
|         return self._clone_into(QuerySetNoCache(self._document, |         return self._clone_into(QuerySetNoCache(self._document, self._collection)) | ||||||
|                                                 self._collection)) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class QuerySetNoCache(BaseQuerySet): | class QuerySetNoCache(BaseQuerySet): | ||||||
| @@ -165,17 +175,17 @@ class QuerySetNoCache(BaseQuerySet): | |||||||
|         .. versionchanged:: 0.6.13 Now doesnt modify the cursor |         .. versionchanged:: 0.6.13 Now doesnt modify the cursor | ||||||
|         """ |         """ | ||||||
|         if self._iter: |         if self._iter: | ||||||
|             return '.. queryset mid-iteration ..' |             return ".. queryset mid-iteration .." | ||||||
|  |  | ||||||
|         data = [] |         data = [] | ||||||
|         for _ in six.moves.range(REPR_OUTPUT_SIZE + 1): |         for _ in range(REPR_OUTPUT_SIZE + 1): | ||||||
|             try: |             try: | ||||||
|                 data.append(six.next(self)) |                 data.append(next(self)) | ||||||
|             except StopIteration: |             except StopIteration: | ||||||
|                 break |                 break | ||||||
|  |  | ||||||
|         if len(data) > REPR_OUTPUT_SIZE: |         if len(data) > REPR_OUTPUT_SIZE: | ||||||
|             data[-1] = '...(remaining elements truncated)...' |             data[-1] = "...(remaining elements truncated)..." | ||||||
|  |  | ||||||
|         self.rewind() |         self.rewind() | ||||||
|         return repr(data) |         return repr(data) | ||||||
|   | |||||||
| @@ -3,28 +3,59 @@ from collections import defaultdict | |||||||
| from bson import ObjectId, SON | from bson import ObjectId, SON | ||||||
| from bson.dbref import DBRef | from bson.dbref import DBRef | ||||||
| import pymongo | import pymongo | ||||||
| import six |  | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine.base import UPDATE_OPERATORS | from mongoengine.base import UPDATE_OPERATORS | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.errors import InvalidQueryError | from mongoengine.errors import InvalidQueryError | ||||||
|  |  | ||||||
| __all__ = ('query', 'update') | __all__ = ("query", "update", "STRING_OPERATORS") | ||||||
|  |  | ||||||
| COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', | COMPARISON_OPERATORS = ( | ||||||
|                         'all', 'size', 'exists', 'not', 'elemMatch', 'type') |     "ne", | ||||||
| GEO_OPERATORS = ('within_distance', 'within_spherical_distance', |     "gt", | ||||||
|                  'within_box', 'within_polygon', 'near', 'near_sphere', |     "gte", | ||||||
|                  'max_distance', 'min_distance', 'geo_within', 'geo_within_box', |     "lt", | ||||||
|                  'geo_within_polygon', 'geo_within_center', |     "lte", | ||||||
|                  'geo_within_sphere', 'geo_intersects') |     "in", | ||||||
| STRING_OPERATORS = ('contains', 'icontains', 'startswith', |     "nin", | ||||||
|                     'istartswith', 'endswith', 'iendswith', |     "mod", | ||||||
|                     'exact', 'iexact') |     "all", | ||||||
| CUSTOM_OPERATORS = ('match',) |     "size", | ||||||
| MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS + |     "exists", | ||||||
|                    STRING_OPERATORS + CUSTOM_OPERATORS) |     "not", | ||||||
|  |     "elemMatch", | ||||||
|  |     "type", | ||||||
|  | ) | ||||||
|  | GEO_OPERATORS = ( | ||||||
|  |     "within_distance", | ||||||
|  |     "within_spherical_distance", | ||||||
|  |     "within_box", | ||||||
|  |     "within_polygon", | ||||||
|  |     "near", | ||||||
|  |     "near_sphere", | ||||||
|  |     "max_distance", | ||||||
|  |     "min_distance", | ||||||
|  |     "geo_within", | ||||||
|  |     "geo_within_box", | ||||||
|  |     "geo_within_polygon", | ||||||
|  |     "geo_within_center", | ||||||
|  |     "geo_within_sphere", | ||||||
|  |     "geo_intersects", | ||||||
|  | ) | ||||||
|  | STRING_OPERATORS = ( | ||||||
|  |     "contains", | ||||||
|  |     "icontains", | ||||||
|  |     "startswith", | ||||||
|  |     "istartswith", | ||||||
|  |     "endswith", | ||||||
|  |     "iendswith", | ||||||
|  |     "exact", | ||||||
|  |     "iexact", | ||||||
|  | ) | ||||||
|  | CUSTOM_OPERATORS = ("match",) | ||||||
|  | MATCH_OPERATORS = ( | ||||||
|  |     COMPARISON_OPERATORS + GEO_OPERATORS + STRING_OPERATORS + CUSTOM_OPERATORS | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
| # TODO make this less complex | # TODO make this less complex | ||||||
| @@ -33,11 +64,11 @@ def query(_doc_cls=None, **kwargs): | |||||||
|     mongo_query = {} |     mongo_query = {} | ||||||
|     merge_query = defaultdict(list) |     merge_query = defaultdict(list) | ||||||
|     for key, value in sorted(kwargs.items()): |     for key, value in sorted(kwargs.items()): | ||||||
|         if key == '__raw__': |         if key == "__raw__": | ||||||
|             mongo_query.update(value) |             mongo_query.update(value) | ||||||
|             continue |             continue | ||||||
|  |  | ||||||
|         parts = key.rsplit('__') |         parts = key.rsplit("__") | ||||||
|         indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()] |         indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()] | ||||||
|         parts = [part for part in parts if not part.isdigit()] |         parts = [part for part in parts if not part.isdigit()] | ||||||
|         # Check for an operator and transform to mongo-style if there is |         # Check for an operator and transform to mongo-style if there is | ||||||
| @@ -46,11 +77,11 @@ def query(_doc_cls=None, **kwargs): | |||||||
|             op = parts.pop() |             op = parts.pop() | ||||||
|  |  | ||||||
|         # Allow to escape operator-like field name by __ |         # Allow to escape operator-like field name by __ | ||||||
|         if len(parts) > 1 and parts[-1] == '': |         if len(parts) > 1 and parts[-1] == "": | ||||||
|             parts.pop() |             parts.pop() | ||||||
|  |  | ||||||
|         negate = False |         negate = False | ||||||
|         if len(parts) > 1 and parts[-1] == 'not': |         if len(parts) > 1 and parts[-1] == "not": | ||||||
|             parts.pop() |             parts.pop() | ||||||
|             negate = True |             negate = True | ||||||
|  |  | ||||||
| @@ -62,18 +93,18 @@ def query(_doc_cls=None, **kwargs): | |||||||
|                 raise InvalidQueryError(e) |                 raise InvalidQueryError(e) | ||||||
|             parts = [] |             parts = [] | ||||||
|  |  | ||||||
|             CachedReferenceField = _import_class('CachedReferenceField') |             CachedReferenceField = _import_class("CachedReferenceField") | ||||||
|             GenericReferenceField = _import_class('GenericReferenceField') |             GenericReferenceField = _import_class("GenericReferenceField") | ||||||
|  |  | ||||||
|             cleaned_fields = [] |             cleaned_fields = [] | ||||||
|             for field in fields: |             for field in fields: | ||||||
|                 append_field = True |                 append_field = True | ||||||
|                 if isinstance(field, six.string_types): |                 if isinstance(field, str): | ||||||
|                     parts.append(field) |                     parts.append(field) | ||||||
|                     append_field = False |                     append_field = False | ||||||
|                 # is last and CachedReferenceField |                 # is last and CachedReferenceField | ||||||
|                 elif isinstance(field, CachedReferenceField) and fields[-1] == field: |                 elif isinstance(field, CachedReferenceField) and fields[-1] == field: | ||||||
|                     parts.append('%s._id' % field.db_field) |                     parts.append("%s._id" % field.db_field) | ||||||
|                 else: |                 else: | ||||||
|                     parts.append(field.db_field) |                     parts.append(field.db_field) | ||||||
|  |  | ||||||
| @@ -83,15 +114,15 @@ def query(_doc_cls=None, **kwargs): | |||||||
|             # Convert value to proper value |             # Convert value to proper value | ||||||
|             field = cleaned_fields[-1] |             field = cleaned_fields[-1] | ||||||
|  |  | ||||||
|             singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] |             singular_ops = [None, "ne", "gt", "gte", "lt", "lte", "not"] | ||||||
|             singular_ops += STRING_OPERATORS |             singular_ops += STRING_OPERATORS | ||||||
|             if op in singular_ops: |             if op in singular_ops: | ||||||
|                 value = field.prepare_query_value(op, value) |                 value = field.prepare_query_value(op, value) | ||||||
|  |  | ||||||
|                 if isinstance(field, CachedReferenceField) and value: |                 if isinstance(field, CachedReferenceField) and value: | ||||||
|                     value = value['_id'] |                     value = value["_id"] | ||||||
|  |  | ||||||
|             elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): |             elif op in ("in", "nin", "all", "near") and not isinstance(value, dict): | ||||||
|                 # Raise an error if the in/nin/all/near param is not iterable. |                 # Raise an error if the in/nin/all/near param is not iterable. | ||||||
|                 value = _prepare_query_for_iterable(field, op, value) |                 value = _prepare_query_for_iterable(field, op, value) | ||||||
|  |  | ||||||
| @@ -101,71 +132,77 @@ def query(_doc_cls=None, **kwargs): | |||||||
|             # * If the value is an ObjectId, the key should be "field_name._ref.$id". |             # * If the value is an ObjectId, the key should be "field_name._ref.$id". | ||||||
|             if isinstance(field, GenericReferenceField): |             if isinstance(field, GenericReferenceField): | ||||||
|                 if isinstance(value, DBRef): |                 if isinstance(value, DBRef): | ||||||
|                     parts[-1] += '._ref' |                     parts[-1] += "._ref" | ||||||
|                 elif isinstance(value, ObjectId): |                 elif isinstance(value, ObjectId): | ||||||
|                     parts[-1] += '._ref.$id' |                     parts[-1] += "._ref.$id" | ||||||
|  |  | ||||||
|         # if op and op not in COMPARISON_OPERATORS: |         # if op and op not in COMPARISON_OPERATORS: | ||||||
|         if op: |         if op: | ||||||
|             if op in GEO_OPERATORS: |             if op in GEO_OPERATORS: | ||||||
|                 value = _geo_operator(field, op, value) |                 value = _geo_operator(field, op, value) | ||||||
|             elif op in ('match', 'elemMatch'): |             elif op in ("match", "elemMatch"): | ||||||
|                 ListField = _import_class('ListField') |                 ListField = _import_class("ListField") | ||||||
|                 EmbeddedDocumentField = _import_class('EmbeddedDocumentField') |                 EmbeddedDocumentField = _import_class("EmbeddedDocumentField") | ||||||
|                 if ( |                 if ( | ||||||
|                     isinstance(value, dict) and |                     isinstance(value, dict) | ||||||
|                     isinstance(field, ListField) and |                     and isinstance(field, ListField) | ||||||
|                     isinstance(field.field, EmbeddedDocumentField) |                     and isinstance(field.field, EmbeddedDocumentField) | ||||||
|                 ): |                 ): | ||||||
|                     value = query(field.field.document_type, **value) |                     value = query(field.field.document_type, **value) | ||||||
|                 else: |                 else: | ||||||
|                     value = field.prepare_query_value(op, value) |                     value = field.prepare_query_value(op, value) | ||||||
|                 value = {'$elemMatch': value} |                 value = {"$elemMatch": value} | ||||||
|             elif op in CUSTOM_OPERATORS: |             elif op in CUSTOM_OPERATORS: | ||||||
|                 NotImplementedError('Custom method "%s" has not ' |                 NotImplementedError( | ||||||
|                                     'been implemented' % op) |                     'Custom method "%s" has not ' "been implemented" % op | ||||||
|  |                 ) | ||||||
|             elif op not in STRING_OPERATORS: |             elif op not in STRING_OPERATORS: | ||||||
|                 value = {'$' + op: value} |                 value = {"$" + op: value} | ||||||
|  |  | ||||||
|         if negate: |         if negate: | ||||||
|             value = {'$not': value} |             value = {"$not": value} | ||||||
|  |  | ||||||
|         for i, part in indices: |         for i, part in indices: | ||||||
|             parts.insert(i, part) |             parts.insert(i, part) | ||||||
|  |  | ||||||
|         key = '.'.join(parts) |         key = ".".join(parts) | ||||||
|  |  | ||||||
|         if op is None or key not in mongo_query: |         if key not in mongo_query: | ||||||
|             mongo_query[key] = value |             mongo_query[key] = value | ||||||
|         elif key in mongo_query: |         else: | ||||||
|             if isinstance(mongo_query[key], dict) and isinstance(value, dict): |             if isinstance(mongo_query[key], dict) and isinstance(value, dict): | ||||||
|                 mongo_query[key].update(value) |                 mongo_query[key].update(value) | ||||||
|                 # $max/minDistance needs to come last - convert to SON |                 # $max/minDistance needs to come last - convert to SON | ||||||
|                 value_dict = mongo_query[key] |                 value_dict = mongo_query[key] | ||||||
|                 if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \ |                 if ("$maxDistance" in value_dict or "$minDistance" in value_dict) and ( | ||||||
|                         ('$near' in value_dict or '$nearSphere' in value_dict): |                     "$near" in value_dict or "$nearSphere" in value_dict | ||||||
|  |                 ): | ||||||
|                     value_son = SON() |                     value_son = SON() | ||||||
|                     for k, v in iteritems(value_dict): |                     for k, v in value_dict.items(): | ||||||
|                         if k == '$maxDistance' or k == '$minDistance': |                         if k == "$maxDistance" or k == "$minDistance": | ||||||
|                             continue |                             continue | ||||||
|                         value_son[k] = v |                         value_son[k] = v | ||||||
|                     # Required for MongoDB >= 2.6, may fail when combining |                     # Required for MongoDB >= 2.6, may fail when combining | ||||||
|                     # PyMongo 3+ and MongoDB < 2.6 |                     # PyMongo 3+ and MongoDB < 2.6 | ||||||
|                     near_embedded = False |                     near_embedded = False | ||||||
|                     for near_op in ('$near', '$nearSphere'): |                     for near_op in ("$near", "$nearSphere"): | ||||||
|                         if isinstance(value_dict.get(near_op), dict): |                         if isinstance(value_dict.get(near_op), dict): | ||||||
|                             value_son[near_op] = SON(value_son[near_op]) |                             value_son[near_op] = SON(value_son[near_op]) | ||||||
|                             if '$maxDistance' in value_dict: |                             if "$maxDistance" in value_dict: | ||||||
|                                 value_son[near_op]['$maxDistance'] = value_dict['$maxDistance'] |                                 value_son[near_op]["$maxDistance"] = value_dict[ | ||||||
|                             if '$minDistance' in value_dict: |                                     "$maxDistance" | ||||||
|                                 value_son[near_op]['$minDistance'] = value_dict['$minDistance'] |                                 ] | ||||||
|  |                             if "$minDistance" in value_dict: | ||||||
|  |                                 value_son[near_op]["$minDistance"] = value_dict[ | ||||||
|  |                                     "$minDistance" | ||||||
|  |                                 ] | ||||||
|                             near_embedded = True |                             near_embedded = True | ||||||
|  |  | ||||||
|                     if not near_embedded: |                     if not near_embedded: | ||||||
|                         if '$maxDistance' in value_dict: |                         if "$maxDistance" in value_dict: | ||||||
|                             value_son['$maxDistance'] = value_dict['$maxDistance'] |                             value_son["$maxDistance"] = value_dict["$maxDistance"] | ||||||
|                         if '$minDistance' in value_dict: |                         if "$minDistance" in value_dict: | ||||||
|                             value_son['$minDistance'] = value_dict['$minDistance'] |                             value_son["$minDistance"] = value_dict["$minDistance"] | ||||||
|                     mongo_query[key] = value_son |                     mongo_query[key] = value_son | ||||||
|             else: |             else: | ||||||
|                 # Store for manually merging later |                 # Store for manually merging later | ||||||
| @@ -177,10 +214,10 @@ def query(_doc_cls=None, **kwargs): | |||||||
|         del mongo_query[k] |         del mongo_query[k] | ||||||
|         if isinstance(v, list): |         if isinstance(v, list): | ||||||
|             value = [{k: val} for val in v] |             value = [{k: val} for val in v] | ||||||
|             if '$and' in mongo_query.keys(): |             if "$and" in mongo_query.keys(): | ||||||
|                 mongo_query['$and'].extend(value) |                 mongo_query["$and"].extend(value) | ||||||
|             else: |             else: | ||||||
|                 mongo_query['$and'] = value |                 mongo_query["$and"] = value | ||||||
|  |  | ||||||
|     return mongo_query |     return mongo_query | ||||||
|  |  | ||||||
| @@ -192,15 +229,15 @@ def update(_doc_cls=None, **update): | |||||||
|     mongo_update = {} |     mongo_update = {} | ||||||
|  |  | ||||||
|     for key, value in update.items(): |     for key, value in update.items(): | ||||||
|         if key == '__raw__': |         if key == "__raw__": | ||||||
|             mongo_update.update(value) |             mongo_update.update(value) | ||||||
|             continue |             continue | ||||||
|  |  | ||||||
|         parts = key.split('__') |         parts = key.split("__") | ||||||
|  |  | ||||||
|         # if there is no operator, default to 'set' |         # if there is no operator, default to 'set' | ||||||
|         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: |         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: | ||||||
|             parts.insert(0, 'set') |             parts.insert(0, "set") | ||||||
|  |  | ||||||
|         # Check for an operator and transform to mongo-style if there is |         # Check for an operator and transform to mongo-style if there is | ||||||
|         op = None |         op = None | ||||||
| @@ -208,13 +245,13 @@ def update(_doc_cls=None, **update): | |||||||
|             op = parts.pop(0) |             op = parts.pop(0) | ||||||
|             # Convert Pythonic names to Mongo equivalents |             # Convert Pythonic names to Mongo equivalents | ||||||
|             operator_map = { |             operator_map = { | ||||||
|                 'push_all': 'pushAll', |                 "push_all": "pushAll", | ||||||
|                 'pull_all': 'pullAll', |                 "pull_all": "pullAll", | ||||||
|                 'dec': 'inc', |                 "dec": "inc", | ||||||
|                 'add_to_set': 'addToSet', |                 "add_to_set": "addToSet", | ||||||
|                 'set_on_insert': 'setOnInsert' |                 "set_on_insert": "setOnInsert", | ||||||
|             } |             } | ||||||
|             if op == 'dec': |             if op == "dec": | ||||||
|                 # Support decrement by flipping a positive value's sign |                 # Support decrement by flipping a positive value's sign | ||||||
|                 # and using 'inc' |                 # and using 'inc' | ||||||
|                 value = -value |                 value = -value | ||||||
| @@ -227,7 +264,7 @@ def update(_doc_cls=None, **update): | |||||||
|             match = parts.pop() |             match = parts.pop() | ||||||
|  |  | ||||||
|         # Allow to escape operator-like field name by __ |         # Allow to escape operator-like field name by __ | ||||||
|         if len(parts) > 1 and parts[-1] == '': |         if len(parts) > 1 and parts[-1] == "": | ||||||
|             parts.pop() |             parts.pop() | ||||||
|  |  | ||||||
|         if _doc_cls: |         if _doc_cls: | ||||||
| @@ -242,10 +279,10 @@ def update(_doc_cls=None, **update): | |||||||
|             appended_sub_field = False |             appended_sub_field = False | ||||||
|             for field in fields: |             for field in fields: | ||||||
|                 append_field = True |                 append_field = True | ||||||
|                 if isinstance(field, six.string_types): |                 if isinstance(field, str): | ||||||
|                     # Convert the S operator to $ |                     # Convert the S operator to $ | ||||||
|                     if field == 'S': |                     if field == "S": | ||||||
|                         field = '$' |                         field = "$" | ||||||
|                     parts.append(field) |                     parts.append(field) | ||||||
|                     append_field = False |                     append_field = False | ||||||
|                 else: |                 else: | ||||||
| @@ -253,7 +290,7 @@ def update(_doc_cls=None, **update): | |||||||
|                 if append_field: |                 if append_field: | ||||||
|                     appended_sub_field = False |                     appended_sub_field = False | ||||||
|                     cleaned_fields.append(field) |                     cleaned_fields.append(field) | ||||||
|                     if hasattr(field, 'field'): |                     if hasattr(field, "field"): | ||||||
|                         cleaned_fields.append(field.field) |                         cleaned_fields.append(field.field) | ||||||
|                         appended_sub_field = True |                         appended_sub_field = True | ||||||
|  |  | ||||||
| @@ -263,52 +300,53 @@ def update(_doc_cls=None, **update): | |||||||
|             else: |             else: | ||||||
|                 field = cleaned_fields[-1] |                 field = cleaned_fields[-1] | ||||||
|  |  | ||||||
|             GeoJsonBaseField = _import_class('GeoJsonBaseField') |             GeoJsonBaseField = _import_class("GeoJsonBaseField") | ||||||
|             if isinstance(field, GeoJsonBaseField): |             if isinstance(field, GeoJsonBaseField): | ||||||
|                 value = field.to_mongo(value) |                 value = field.to_mongo(value) | ||||||
|  |  | ||||||
|             if op == 'pull': |             if op == "pull": | ||||||
|                 if field.required or value is not None: |                 if field.required or value is not None: | ||||||
|                     if match in ('in', 'nin') and not isinstance(value, dict): |                     if match in ("in", "nin") and not isinstance(value, dict): | ||||||
|                         value = _prepare_query_for_iterable(field, op, value) |                         value = _prepare_query_for_iterable(field, op, value) | ||||||
|                     else: |                     else: | ||||||
|                         value = field.prepare_query_value(op, value) |                         value = field.prepare_query_value(op, value) | ||||||
|             elif op == 'push' and isinstance(value, (list, tuple, set)): |             elif op == "push" and isinstance(value, (list, tuple, set)): | ||||||
|                 value = [field.prepare_query_value(op, v) for v in value] |                 value = [field.prepare_query_value(op, v) for v in value] | ||||||
|             elif op in (None, 'set', 'push'): |             elif op in (None, "set", "push"): | ||||||
|                 if field.required or value is not None: |                 if field.required or value is not None: | ||||||
|                     value = field.prepare_query_value(op, value) |                     value = field.prepare_query_value(op, value) | ||||||
|             elif op in ('pushAll', 'pullAll'): |             elif op in ("pushAll", "pullAll"): | ||||||
|                 value = [field.prepare_query_value(op, v) for v in value] |                 value = [field.prepare_query_value(op, v) for v in value] | ||||||
|             elif op in ('addToSet', 'setOnInsert'): |             elif op in ("addToSet", "setOnInsert"): | ||||||
|                 if isinstance(value, (list, tuple, set)): |                 if isinstance(value, (list, tuple, set)): | ||||||
|                     value = [field.prepare_query_value(op, v) for v in value] |                     value = [field.prepare_query_value(op, v) for v in value] | ||||||
|                 elif field.required or value is not None: |                 elif field.required or value is not None: | ||||||
|                     value = field.prepare_query_value(op, value) |                     value = field.prepare_query_value(op, value) | ||||||
|             elif op == 'unset': |             elif op == "unset": | ||||||
|                 value = 1 |                 value = 1 | ||||||
|             elif op == 'inc': |             elif op == "inc": | ||||||
|                 value = field.prepare_query_value(op, value) |                 value = field.prepare_query_value(op, value) | ||||||
|  |  | ||||||
|         if match: |         if match: | ||||||
|             match = '$' + match |             match = "$" + match | ||||||
|             value = {match: value} |             value = {match: value} | ||||||
|  |  | ||||||
|         key = '.'.join(parts) |         key = ".".join(parts) | ||||||
|  |  | ||||||
|         if 'pull' in op and '.' in key: |         if "pull" in op and "." in key: | ||||||
|             # Dot operators don't work on pull operations |             # Dot operators don't work on pull operations | ||||||
|             # unless they point to a list field |             # unless they point to a list field | ||||||
|             # Otherwise it uses nested dict syntax |             # Otherwise it uses nested dict syntax | ||||||
|             if op == 'pullAll': |             if op == "pullAll": | ||||||
|                 raise InvalidQueryError('pullAll operations only support ' |                 raise InvalidQueryError( | ||||||
|                                         'a single field depth') |                     "pullAll operations only support a single field depth" | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|             # Look for the last list field and use dot notation until there |             # Look for the last list field and use dot notation until there | ||||||
|             field_classes = [c.__class__ for c in cleaned_fields] |             field_classes = [c.__class__ for c in cleaned_fields] | ||||||
|             field_classes.reverse() |             field_classes.reverse() | ||||||
|             ListField = _import_class('ListField') |             ListField = _import_class("ListField") | ||||||
|             EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') |             EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField") | ||||||
|             if ListField in field_classes or EmbeddedDocumentListField in field_classes: |             if ListField in field_classes or EmbeddedDocumentListField in field_classes: | ||||||
|                 # Join all fields via dot notation to the last ListField or EmbeddedDocumentListField |                 # Join all fields via dot notation to the last ListField or EmbeddedDocumentListField | ||||||
|                 # Then process as normal |                 # Then process as normal | ||||||
| @@ -317,37 +355,36 @@ def update(_doc_cls=None, **update): | |||||||
|                 else: |                 else: | ||||||
|                     _check_field = EmbeddedDocumentListField |                     _check_field = EmbeddedDocumentListField | ||||||
|  |  | ||||||
|                 last_listField = len( |                 last_listField = len(cleaned_fields) - field_classes.index(_check_field) | ||||||
|                     cleaned_fields) - field_classes.index(_check_field) |                 key = ".".join(parts[:last_listField]) | ||||||
|                 key = '.'.join(parts[:last_listField]) |  | ||||||
|                 parts = parts[last_listField:] |                 parts = parts[last_listField:] | ||||||
|                 parts.insert(0, key) |                 parts.insert(0, key) | ||||||
|  |  | ||||||
|             parts.reverse() |             parts.reverse() | ||||||
|             for key in parts: |             for key in parts: | ||||||
|                 value = {key: value} |                 value = {key: value} | ||||||
|         elif op == 'addToSet' and isinstance(value, list): |         elif op == "addToSet" and isinstance(value, list): | ||||||
|             value = {key: {'$each': value}} |             value = {key: {"$each": value}} | ||||||
|         elif op in ('push', 'pushAll'): |         elif op in ("push", "pushAll"): | ||||||
|             if parts[-1].isdigit(): |             if parts[-1].isdigit(): | ||||||
|                 key = '.'.join(parts[0:-1]) |                 key = ".".join(parts[0:-1]) | ||||||
|                 position = int(parts[-1]) |                 position = int(parts[-1]) | ||||||
|                 # $position expects an iterable. If pushing a single value, |                 # $position expects an iterable. If pushing a single value, | ||||||
|                 # wrap it in a list. |                 # wrap it in a list. | ||||||
|                 if not isinstance(value, (set, tuple, list)): |                 if not isinstance(value, (set, tuple, list)): | ||||||
|                     value = [value] |                     value = [value] | ||||||
|                 value = {key: {'$each': value, '$position': position}} |                 value = {key: {"$each": value, "$position": position}} | ||||||
|             else: |             else: | ||||||
|                 if op == 'pushAll': |                 if op == "pushAll": | ||||||
|                     op = 'push'  # convert to non-deprecated keyword |                     op = "push"  # convert to non-deprecated keyword | ||||||
|                     if not isinstance(value, (set, tuple, list)): |                     if not isinstance(value, (set, tuple, list)): | ||||||
|                         value = [value] |                         value = [value] | ||||||
|                     value = {key: {'$each': value}} |                     value = {key: {"$each": value}} | ||||||
|                 else: |                 else: | ||||||
|                     value = {key: value} |                     value = {key: value} | ||||||
|         else: |         else: | ||||||
|             value = {key: value} |             value = {key: value} | ||||||
|         key = '$' + op |         key = "$" + op | ||||||
|         if key not in mongo_update: |         if key not in mongo_update: | ||||||
|             mongo_update[key] = value |             mongo_update[key] = value | ||||||
|         elif key in mongo_update and isinstance(mongo_update[key], dict): |         elif key in mongo_update and isinstance(mongo_update[key], dict): | ||||||
| @@ -358,45 +395,47 @@ def update(_doc_cls=None, **update): | |||||||
|  |  | ||||||
| def _geo_operator(field, op, value): | def _geo_operator(field, op, value): | ||||||
|     """Helper to return the query for a given geo query.""" |     """Helper to return the query for a given geo query.""" | ||||||
|     if op == 'max_distance': |     if op == "max_distance": | ||||||
|         value = {'$maxDistance': value} |         value = {"$maxDistance": value} | ||||||
|     elif op == 'min_distance': |     elif op == "min_distance": | ||||||
|         value = {'$minDistance': value} |         value = {"$minDistance": value} | ||||||
|     elif field._geo_index == pymongo.GEO2D: |     elif field._geo_index == pymongo.GEO2D: | ||||||
|         if op == 'within_distance': |         if op == "within_distance": | ||||||
|             value = {'$within': {'$center': value}} |             value = {"$within": {"$center": value}} | ||||||
|         elif op == 'within_spherical_distance': |         elif op == "within_spherical_distance": | ||||||
|             value = {'$within': {'$centerSphere': value}} |             value = {"$within": {"$centerSphere": value}} | ||||||
|         elif op == 'within_polygon': |         elif op == "within_polygon": | ||||||
|             value = {'$within': {'$polygon': value}} |             value = {"$within": {"$polygon": value}} | ||||||
|         elif op == 'near': |         elif op == "near": | ||||||
|             value = {'$near': value} |             value = {"$near": value} | ||||||
|         elif op == 'near_sphere': |         elif op == "near_sphere": | ||||||
|             value = {'$nearSphere': value} |             value = {"$nearSphere": value} | ||||||
|         elif op == 'within_box': |         elif op == "within_box": | ||||||
|             value = {'$within': {'$box': value}} |             value = {"$within": {"$box": value}} | ||||||
|         else: |  | ||||||
|             raise NotImplementedError('Geo method "%s" has not been ' |  | ||||||
|                                       'implemented for a GeoPointField' % op) |  | ||||||
|     else: |  | ||||||
|         if op == 'geo_within': |  | ||||||
|             value = {'$geoWithin': _infer_geometry(value)} |  | ||||||
|         elif op == 'geo_within_box': |  | ||||||
|             value = {'$geoWithin': {'$box': value}} |  | ||||||
|         elif op == 'geo_within_polygon': |  | ||||||
|             value = {'$geoWithin': {'$polygon': value}} |  | ||||||
|         elif op == 'geo_within_center': |  | ||||||
|             value = {'$geoWithin': {'$center': value}} |  | ||||||
|         elif op == 'geo_within_sphere': |  | ||||||
|             value = {'$geoWithin': {'$centerSphere': value}} |  | ||||||
|         elif op == 'geo_intersects': |  | ||||||
|             value = {'$geoIntersects': _infer_geometry(value)} |  | ||||||
|         elif op == 'near': |  | ||||||
|             value = {'$near': _infer_geometry(value)} |  | ||||||
|         else: |         else: | ||||||
|             raise NotImplementedError( |             raise NotImplementedError( | ||||||
|                 'Geo method "%s" has not been implemented for a %s ' |                 'Geo method "%s" has not been ' "implemented for a GeoPointField" % op | ||||||
|                 % (op, field._name) |             ) | ||||||
|  |     else: | ||||||
|  |         if op == "geo_within": | ||||||
|  |             value = {"$geoWithin": _infer_geometry(value)} | ||||||
|  |         elif op == "geo_within_box": | ||||||
|  |             value = {"$geoWithin": {"$box": value}} | ||||||
|  |         elif op == "geo_within_polygon": | ||||||
|  |             value = {"$geoWithin": {"$polygon": value}} | ||||||
|  |         elif op == "geo_within_center": | ||||||
|  |             value = {"$geoWithin": {"$center": value}} | ||||||
|  |         elif op == "geo_within_sphere": | ||||||
|  |             value = {"$geoWithin": {"$centerSphere": value}} | ||||||
|  |         elif op == "geo_intersects": | ||||||
|  |             value = {"$geoIntersects": _infer_geometry(value)} | ||||||
|  |         elif op == "near": | ||||||
|  |             value = {"$near": _infer_geometry(value)} | ||||||
|  |         else: | ||||||
|  |             raise NotImplementedError( | ||||||
|  |                 'Geo method "{}" has not been implemented for a {} '.format( | ||||||
|  |                     op, field._name | ||||||
|  |                 ) | ||||||
|             ) |             ) | ||||||
|     return value |     return value | ||||||
|  |  | ||||||
| @@ -406,51 +445,58 @@ def _infer_geometry(value): | |||||||
|     given value. |     given value. | ||||||
|     """ |     """ | ||||||
|     if isinstance(value, dict): |     if isinstance(value, dict): | ||||||
|         if '$geometry' in value: |         if "$geometry" in value: | ||||||
|             return value |             return value | ||||||
|         elif 'coordinates' in value and 'type' in value: |         elif "coordinates" in value and "type" in value: | ||||||
|             return {'$geometry': value} |             return {"$geometry": value} | ||||||
|         raise InvalidQueryError('Invalid $geometry dictionary should have ' |         raise InvalidQueryError( | ||||||
|                                 'type and coordinates keys') |             "Invalid $geometry dictionary should have type and coordinates keys" | ||||||
|  |         ) | ||||||
|     elif isinstance(value, (list, set)): |     elif isinstance(value, (list, set)): | ||||||
|         # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon? |         # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon? | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             value[0][0][0] |             value[0][0][0] | ||||||
|             return {'$geometry': {'type': 'Polygon', 'coordinates': value}} |             return {"$geometry": {"type": "Polygon", "coordinates": value}} | ||||||
|         except (TypeError, IndexError): |         except (TypeError, IndexError): | ||||||
|             pass |             pass | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             value[0][0] |             value[0][0] | ||||||
|             return {'$geometry': {'type': 'LineString', 'coordinates': value}} |             return {"$geometry": {"type": "LineString", "coordinates": value}} | ||||||
|         except (TypeError, IndexError): |         except (TypeError, IndexError): | ||||||
|             pass |             pass | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             value[0] |             value[0] | ||||||
|             return {'$geometry': {'type': 'Point', 'coordinates': value}} |             return {"$geometry": {"type": "Point", "coordinates": value}} | ||||||
|         except (TypeError, IndexError): |         except (TypeError, IndexError): | ||||||
|             pass |             pass | ||||||
|  |  | ||||||
|     raise InvalidQueryError('Invalid $geometry data. Can be either a ' |     raise InvalidQueryError( | ||||||
|                             'dictionary or (nested) lists of coordinate(s)') |         "Invalid $geometry data. Can be either a " | ||||||
|  |         "dictionary or (nested) lists of coordinate(s)" | ||||||
|  |     ) | ||||||
|  |  | ||||||
|  |  | ||||||
| def _prepare_query_for_iterable(field, op, value): | def _prepare_query_for_iterable(field, op, value): | ||||||
|     # We need a special check for BaseDocument, because - although it's iterable - using |     # We need a special check for BaseDocument, because - although it's iterable - using | ||||||
|     # it as such in the context of this method is most definitely a mistake. |     # it as such in the context of this method is most definitely a mistake. | ||||||
|     BaseDocument = _import_class('BaseDocument') |     BaseDocument = _import_class("BaseDocument") | ||||||
|  |  | ||||||
|     if isinstance(value, BaseDocument): |     if isinstance(value, BaseDocument): | ||||||
|         raise TypeError("When using the `in`, `nin`, `all`, or " |         raise TypeError( | ||||||
|                         "`near`-operators you can\'t use a " |             "When using the `in`, `nin`, `all`, or " | ||||||
|                         "`Document`, you must wrap your object " |             "`near`-operators you can't use a " | ||||||
|                         "in a list (object -> [object]).") |             "`Document`, you must wrap your object " | ||||||
|  |             "in a list (object -> [object])." | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     if not hasattr(value, '__iter__'): |     if not hasattr(value, "__iter__"): | ||||||
|         raise TypeError("The `in`, `nin`, `all`, or " |         raise TypeError( | ||||||
|                         "`near`-operators must be applied to an " |             "The `in`, `nin`, `all`, or " | ||||||
|                         "iterable (e.g. a list).") |             "`near`-operators must be applied to an " | ||||||
|  |             "iterable (e.g. a list)." | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     return [field.prepare_query_value(op, v) for v in value] |     return [field.prepare_query_value(op, v) for v in value] | ||||||
|   | |||||||
| @@ -1,12 +1,13 @@ | |||||||
| import copy | import copy | ||||||
|  | import warnings | ||||||
|  |  | ||||||
| from mongoengine.errors import InvalidQueryError | from mongoengine.errors import InvalidQueryError | ||||||
| from mongoengine.queryset import transform | from mongoengine.queryset import transform | ||||||
|  |  | ||||||
| __all__ = ('Q', 'QNode') | __all__ = ("Q", "QNode") | ||||||
|  |  | ||||||
|  |  | ||||||
| class QNodeVisitor(object): | class QNodeVisitor: | ||||||
|     """Base visitor class for visiting Q-object nodes in a query tree. |     """Base visitor class for visiting Q-object nodes in a query tree. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
| @@ -69,16 +70,16 @@ class QueryCompilerVisitor(QNodeVisitor): | |||||||
|         self.document = document |         self.document = document | ||||||
|  |  | ||||||
|     def visit_combination(self, combination): |     def visit_combination(self, combination): | ||||||
|         operator = '$and' |         operator = "$and" | ||||||
|         if combination.operation == combination.OR: |         if combination.operation == combination.OR: | ||||||
|             operator = '$or' |             operator = "$or" | ||||||
|         return {operator: combination.children} |         return {operator: combination.children} | ||||||
|  |  | ||||||
|     def visit_query(self, query): |     def visit_query(self, query): | ||||||
|         return transform.query(self.document, **query.query) |         return transform.query(self.document, **query.query) | ||||||
|  |  | ||||||
|  |  | ||||||
| class QNode(object): | class QNode: | ||||||
|     """Base class for nodes in query trees.""" |     """Base class for nodes in query trees.""" | ||||||
|  |  | ||||||
|     AND = 0 |     AND = 0 | ||||||
| @@ -96,9 +97,11 @@ class QNode(object): | |||||||
|         """Combine this node with another node into a QCombination |         """Combine this node with another node into a QCombination | ||||||
|         object. |         object. | ||||||
|         """ |         """ | ||||||
|         if getattr(other, 'empty', True): |         # If the other Q() is empty, ignore it and just use `self`. | ||||||
|  |         if getattr(other, "empty", True): | ||||||
|             return self |             return self | ||||||
|  |  | ||||||
|  |         # Or if this Q is empty, ignore it and just use `other`. | ||||||
|         if self.empty: |         if self.empty: | ||||||
|             return other |             return other | ||||||
|  |  | ||||||
| @@ -106,6 +109,8 @@ class QNode(object): | |||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def empty(self): |     def empty(self): | ||||||
|  |         msg = "'empty' property is deprecated in favour of using 'not bool(filter)'" | ||||||
|  |         warnings.warn(msg, DeprecationWarning) | ||||||
|         return False |         return False | ||||||
|  |  | ||||||
|     def __or__(self, other): |     def __or__(self, other): | ||||||
| @@ -132,8 +137,11 @@ class QCombination(QNode): | |||||||
|                 self.children.append(node) |                 self.children.append(node) | ||||||
|  |  | ||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
|         op = ' & ' if self.operation is self.AND else ' | ' |         op = " & " if self.operation is self.AND else " | " | ||||||
|         return '(%s)' % op.join([repr(node) for node in self.children]) |         return "(%s)" % op.join([repr(node) for node in self.children]) | ||||||
|  |  | ||||||
|  |     def __bool__(self): | ||||||
|  |         return bool(self.children) | ||||||
|  |  | ||||||
|     def accept(self, visitor): |     def accept(self, visitor): | ||||||
|         for i in range(len(self.children)): |         for i in range(len(self.children)): | ||||||
| @@ -144,8 +152,17 @@ class QCombination(QNode): | |||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def empty(self): |     def empty(self): | ||||||
|  |         msg = "'empty' property is deprecated in favour of using 'not bool(filter)'" | ||||||
|  |         warnings.warn(msg, DeprecationWarning) | ||||||
|         return not bool(self.children) |         return not bool(self.children) | ||||||
|  |  | ||||||
|  |     def __eq__(self, other): | ||||||
|  |         return ( | ||||||
|  |             self.__class__ == other.__class__ | ||||||
|  |             and self.operation == other.operation | ||||||
|  |             and self.children == other.children | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class Q(QNode): | class Q(QNode): | ||||||
|     """A simple query object, used in a query tree to build up more complex |     """A simple query object, used in a query tree to build up more complex | ||||||
| @@ -156,7 +173,13 @@ class Q(QNode): | |||||||
|         self.query = query |         self.query = query | ||||||
|  |  | ||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
|         return 'Q(**%s)' % repr(self.query) |         return "Q(**%s)" % repr(self.query) | ||||||
|  |  | ||||||
|  |     def __bool__(self): | ||||||
|  |         return bool(self.query) | ||||||
|  |  | ||||||
|  |     def __eq__(self, other): | ||||||
|  |         return self.__class__ == other.__class__ and self.query == other.query | ||||||
|  |  | ||||||
|     def accept(self, visitor): |     def accept(self, visitor): | ||||||
|         return visitor.visit_query(self) |         return visitor.visit_query(self) | ||||||
|   | |||||||
| @@ -1,5 +1,12 @@ | |||||||
| __all__ = ('pre_init', 'post_init', 'pre_save', 'pre_save_post_validation', | __all__ = ( | ||||||
|            'post_save', 'pre_delete', 'post_delete') |     "pre_init", | ||||||
|  |     "post_init", | ||||||
|  |     "pre_save", | ||||||
|  |     "pre_save_post_validation", | ||||||
|  |     "post_save", | ||||||
|  |     "pre_delete", | ||||||
|  |     "post_delete", | ||||||
|  | ) | ||||||
|  |  | ||||||
| signals_available = False | signals_available = False | ||||||
| try: | try: | ||||||
| @@ -7,11 +14,12 @@ try: | |||||||
|  |  | ||||||
|     signals_available = True |     signals_available = True | ||||||
| except ImportError: | except ImportError: | ||||||
|     class Namespace(object): |  | ||||||
|  |     class Namespace: | ||||||
|         def signal(self, name, doc=None): |         def signal(self, name, doc=None): | ||||||
|             return _FakeSignal(name, doc) |             return _FakeSignal(name, doc) | ||||||
|  |  | ||||||
|     class _FakeSignal(object): |     class _FakeSignal: | ||||||
|         """If blinker is unavailable, create a fake class with the same |         """If blinker is unavailable, create a fake class with the same | ||||||
|         interface that allows sending of signals but will fail with an |         interface that allows sending of signals but will fail with an | ||||||
|         error on anything else.  Instead of doing anything on send, it |         error on anything else.  Instead of doing anything on send, it | ||||||
| @@ -23,13 +31,16 @@ except ImportError: | |||||||
|             self.__doc__ = doc |             self.__doc__ = doc | ||||||
|  |  | ||||||
|         def _fail(self, *args, **kwargs): |         def _fail(self, *args, **kwargs): | ||||||
|             raise RuntimeError('signalling support is unavailable ' |             raise RuntimeError( | ||||||
|                                'because the blinker library is ' |                 "signalling support is unavailable " | ||||||
|                                'not installed.') |                 "because the blinker library is " | ||||||
|  |                 "not installed." | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         send = lambda *a, **kw: None  # noqa |         send = lambda *a, **kw: None  # noqa | ||||||
|         connect = disconnect = has_receivers_for = receivers_for = \ |         connect = ( | ||||||
|             temporarily_connected_to = _fail |             disconnect | ||||||
|  |         ) = has_receivers_for = receivers_for = temporarily_connected_to = _fail | ||||||
|         del _fail |         del _fail | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -37,12 +48,12 @@ except ImportError: | |||||||
| # not put signals in here.  Create your own namespace instead. | # not put signals in here.  Create your own namespace instead. | ||||||
| _signals = Namespace() | _signals = Namespace() | ||||||
|  |  | ||||||
| pre_init = _signals.signal('pre_init') | pre_init = _signals.signal("pre_init") | ||||||
| post_init = _signals.signal('post_init') | post_init = _signals.signal("post_init") | ||||||
| pre_save = _signals.signal('pre_save') | pre_save = _signals.signal("pre_save") | ||||||
| pre_save_post_validation = _signals.signal('pre_save_post_validation') | pre_save_post_validation = _signals.signal("pre_save_post_validation") | ||||||
| post_save = _signals.signal('post_save') | post_save = _signals.signal("post_save") | ||||||
| pre_delete = _signals.signal('pre_delete') | pre_delete = _signals.signal("pre_delete") | ||||||
| post_delete = _signals.signal('post_delete') | post_delete = _signals.signal("post_delete") | ||||||
| pre_bulk_insert = _signals.signal('pre_bulk_insert') | pre_bulk_insert = _signals.signal("pre_bulk_insert") | ||||||
| post_bulk_insert = _signals.signal('post_bulk_insert') | post_bulk_insert = _signals.signal("post_bulk_insert") | ||||||
|   | |||||||
| @@ -51,4 +51,4 @@ rm -rf $RPM_BUILD_ROOT | |||||||
| # %{python_sitearch}/* | # %{python_sitearch}/* | ||||||
|  |  | ||||||
| %changelog | %changelog | ||||||
| * See: http://docs.mongoengine.org/en/latest/changelog.html | * See: http://docs.mongoengine.org/en/latest/changelog.html | ||||||
|   | |||||||
							
								
								
									
										8
									
								
								requirements-dev.txt
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										8
									
								
								requirements-dev.txt
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,8 @@ | |||||||
|  | black | ||||||
|  | flake8 | ||||||
|  | flake8-import-order | ||||||
|  | pre-commit | ||||||
|  | pytest | ||||||
|  | ipdb | ||||||
|  | ipython | ||||||
|  | tox | ||||||
| @@ -1,7 +1,3 @@ | |||||||
| nose |  | ||||||
| pymongo>=3.4 | pymongo>=3.4 | ||||||
| six==1.10.0 |  | ||||||
| flake8 |  | ||||||
| flake8-import-order |  | ||||||
| Sphinx==1.5.5 | Sphinx==1.5.5 | ||||||
| sphinx-rtd-theme==0.2.4 | sphinx-rtd-theme==0.2.4 | ||||||
|   | |||||||
							
								
								
									
										13
									
								
								setup.cfg
									
									
									
									
									
								
							
							
						
						
									
										13
									
								
								setup.cfg
									
									
									
									
									
								
							| @@ -1,11 +1,10 @@ | |||||||
| [nosetests] |  | ||||||
| verbosity=2 |  | ||||||
| detailed-errors=1 |  | ||||||
| #tests=tests |  | ||||||
| cover-package=mongoengine |  | ||||||
|  |  | ||||||
| [flake8] | [flake8] | ||||||
| ignore=E501,F401,F403,F405,I201,I202,W504, W605 | ignore=E501,F401,F403,F405,I201,I202,W504, W605, W503 | ||||||
| exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests | exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests | ||||||
| max-complexity=47 | max-complexity=47 | ||||||
| application-import-names=mongoengine,tests | application-import-names=mongoengine,tests | ||||||
|  |  | ||||||
|  | [tool:pytest] | ||||||
|  | # Limits the discovery to tests directory | ||||||
|  | # avoids that it runs for instance the benchmark | ||||||
|  | testpaths = tests | ||||||
|   | |||||||
							
								
								
									
										135
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										135
									
								
								setup.py
									
									
									
									
									
								
							| @@ -1,6 +1,9 @@ | |||||||
| import os | import os | ||||||
| import sys | import sys | ||||||
|  |  | ||||||
|  | from pkg_resources import normalize_path | ||||||
| from setuptools import find_packages, setup | from setuptools import find_packages, setup | ||||||
|  | from setuptools.command.test import test as TestCommand | ||||||
|  |  | ||||||
| # Hack to silence atexit traceback in newer python versions | # Hack to silence atexit traceback in newer python versions | ||||||
| try: | try: | ||||||
| @@ -8,13 +11,10 @@ try: | |||||||
| except ImportError: | except ImportError: | ||||||
|     pass |     pass | ||||||
|  |  | ||||||
| DESCRIPTION = ( | DESCRIPTION = "MongoEngine is a Python Object-Document Mapper for working with MongoDB." | ||||||
|     'MongoEngine is a Python Object-Document ' |  | ||||||
|     'Mapper for working with MongoDB.' |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| try: | try: | ||||||
|     with open('README.rst') as fin: |     with open("README.rst") as fin: | ||||||
|         LONG_DESCRIPTION = fin.read() |         LONG_DESCRIPTION = fin.read() | ||||||
| except Exception: | except Exception: | ||||||
|     LONG_DESCRIPTION = None |     LONG_DESCRIPTION = None | ||||||
| @@ -24,63 +24,124 @@ def get_version(version_tuple): | |||||||
|     """Return the version tuple as a string, e.g. for (0, 10, 7), |     """Return the version tuple as a string, e.g. for (0, 10, 7), | ||||||
|     return '0.10.7'. |     return '0.10.7'. | ||||||
|     """ |     """ | ||||||
|     return '.'.join(map(str, version_tuple)) |     return ".".join(map(str, version_tuple)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class PyTest(TestCommand): | ||||||
|  |     """Will force pytest to search for tests inside the build directory | ||||||
|  |     for 2to3 converted code (used by tox), instead of the current directory. | ||||||
|  |     Required as long as we need 2to3 | ||||||
|  |  | ||||||
|  |     Known Limitation: https://tox.readthedocs.io/en/latest/example/pytest.html#known-issues-and-limitations | ||||||
|  |     Source: https://www.hackzine.org/python-testing-with-pytest-and-2to3-plus-tox-and-travis-ci.html | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     # https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands | ||||||
|  |     # Allows to provide pytest command argument through the test runner command `python setup.py test` | ||||||
|  |     # e.g: `python setup.py test -a "-k=test"` | ||||||
|  |     # This only works for 1 argument though | ||||||
|  |     user_options = [("pytest-args=", "a", "Arguments to pass to py.test")] | ||||||
|  |  | ||||||
|  |     def initialize_options(self): | ||||||
|  |         TestCommand.initialize_options(self) | ||||||
|  |         self.pytest_args = "" | ||||||
|  |  | ||||||
|  |     def finalize_options(self): | ||||||
|  |         TestCommand.finalize_options(self) | ||||||
|  |         self.test_args = ["tests"] | ||||||
|  |         self.test_suite = True | ||||||
|  |  | ||||||
|  |     def run_tests(self): | ||||||
|  |         # import here, cause outside the eggs aren't loaded | ||||||
|  |         from pkg_resources import _namespace_packages | ||||||
|  |         import pytest | ||||||
|  |  | ||||||
|  |         # Purge modules under test from sys.modules. The test loader will | ||||||
|  |         # re-import them from the build location. Required when 2to3 is used | ||||||
|  |         # with namespace packages. | ||||||
|  |         if sys.version_info >= (3,) and getattr(self.distribution, "use_2to3", False): | ||||||
|  |             module = self.test_args[-1].split(".")[0] | ||||||
|  |             if module in _namespace_packages: | ||||||
|  |                 del_modules = [] | ||||||
|  |                 if module in sys.modules: | ||||||
|  |                     del_modules.append(module) | ||||||
|  |                 module += "." | ||||||
|  |                 for name in sys.modules: | ||||||
|  |                     if name.startswith(module): | ||||||
|  |                         del_modules.append(name) | ||||||
|  |                 map(sys.modules.__delitem__, del_modules) | ||||||
|  |  | ||||||
|  |             # Run on the build directory for 2to3-built code | ||||||
|  |             # This will prevent the old 2.x code from being found | ||||||
|  |             # by py.test discovery mechanism, that apparently | ||||||
|  |             # ignores sys.path.. | ||||||
|  |             ei_cmd = self.get_finalized_command("egg_info") | ||||||
|  |             self.test_args = [normalize_path(ei_cmd.egg_base)] | ||||||
|  |  | ||||||
|  |         cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else []) | ||||||
|  |         errno = pytest.main(cmd_args) | ||||||
|  |         sys.exit(errno) | ||||||
|  |  | ||||||
|  |  | ||||||
| # Dirty hack to get version number from monogengine/__init__.py - we can't | # Dirty hack to get version number from monogengine/__init__.py - we can't | ||||||
| # import it as it depends on PyMongo and PyMongo isn't installed until this | # import it as it depends on PyMongo and PyMongo isn't installed until this | ||||||
| # file is read | # file is read | ||||||
| init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') | init = os.path.join(os.path.dirname(__file__), "mongoengine", "__init__.py") | ||||||
| version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0] | version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0] | ||||||
|  |  | ||||||
| VERSION = get_version(eval(version_line.split('=')[-1])) | VERSION = get_version(eval(version_line.split("=")[-1])) | ||||||
|  |  | ||||||
| CLASSIFIERS = [ | CLASSIFIERS = [ | ||||||
|     'Development Status :: 4 - Beta', |     "Development Status :: 5 - Production/Stable", | ||||||
|     'Intended Audience :: Developers', |     "Intended Audience :: Developers", | ||||||
|     'License :: OSI Approved :: MIT License', |     "License :: OSI Approved :: MIT License", | ||||||
|     'Operating System :: OS Independent', |     "Operating System :: OS Independent", | ||||||
|     'Programming Language :: Python', |     "Programming Language :: Python", | ||||||
|     "Programming Language :: Python :: 2", |  | ||||||
|     "Programming Language :: Python :: 2.7", |  | ||||||
|     "Programming Language :: Python :: 3", |     "Programming Language :: Python :: 3", | ||||||
|     "Programming Language :: Python :: 3.5", |     "Programming Language :: Python :: 3.5", | ||||||
|     "Programming Language :: Python :: 3.6", |     "Programming Language :: Python :: 3.6", | ||||||
|  |     "Programming Language :: Python :: 3.7", | ||||||
|  |     "Programming Language :: Python :: 3.8", | ||||||
|     "Programming Language :: Python :: Implementation :: CPython", |     "Programming Language :: Python :: Implementation :: CPython", | ||||||
|     "Programming Language :: Python :: Implementation :: PyPy", |     "Programming Language :: Python :: Implementation :: PyPy", | ||||||
|     'Topic :: Database', |     "Topic :: Database", | ||||||
|     'Topic :: Software Development :: Libraries :: Python Modules', |     "Topic :: Software Development :: Libraries :: Python Modules", | ||||||
| ] | ] | ||||||
|  |  | ||||||
| extra_opts = { | extra_opts = { | ||||||
|     'packages': find_packages(exclude=['tests', 'tests.*']), |     "packages": find_packages(exclude=["tests", "tests.*"]), | ||||||
|     'tests_require': ['nose', 'coverage==4.2', 'blinker', 'Pillow>=2.0.0'] |     "tests_require": [ | ||||||
|  |         "pytest<5.0", | ||||||
|  |         "pytest-cov", | ||||||
|  |         "coverage<5.0",  # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls | ||||||
|  |         "blinker", | ||||||
|  |         "Pillow>=2.0.0, <7.0.0",  # 7.0.0 dropped Python2 support | ||||||
|  |     ], | ||||||
| } | } | ||||||
| if sys.version_info[0] == 3: |  | ||||||
|     extra_opts['use_2to3'] = True | if "test" in sys.argv: | ||||||
|     if 'test' in sys.argv or 'nosetests' in sys.argv: |     extra_opts["packages"] = find_packages() | ||||||
|         extra_opts['packages'] = find_packages() |     extra_opts["package_data"] = { | ||||||
|         extra_opts['package_data'] = { |         "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"] | ||||||
|             'tests': ['fields/mongoengine.png', 'fields/mongodb_leaf.png']} |     } | ||||||
| else: |  | ||||||
|     extra_opts['tests_require'] += ['python-dateutil'] |  | ||||||
|  |  | ||||||
| setup( | setup( | ||||||
|     name='mongoengine', |     name="mongoengine", | ||||||
|     version=VERSION, |     version=VERSION, | ||||||
|     author='Harry Marr', |     author="Harry Marr", | ||||||
|     author_email='harry.marr@gmail.com', |     author_email="harry.marr@gmail.com", | ||||||
|     maintainer="Stefan Wojcik", |     maintainer="Stefan Wojcik", | ||||||
|     maintainer_email="wojcikstefan@gmail.com", |     maintainer_email="wojcikstefan@gmail.com", | ||||||
|     url='http://mongoengine.org/', |     url="http://mongoengine.org/", | ||||||
|     download_url='https://github.com/MongoEngine/mongoengine/tarball/master', |     download_url="https://github.com/MongoEngine/mongoengine/tarball/master", | ||||||
|     license='MIT', |     license="MIT", | ||||||
|     include_package_data=True, |     include_package_data=True, | ||||||
|     description=DESCRIPTION, |     description=DESCRIPTION, | ||||||
|     long_description=LONG_DESCRIPTION, |     long_description=LONG_DESCRIPTION, | ||||||
|     platforms=['any'], |     platforms=["any"], | ||||||
|     classifiers=CLASSIFIERS, |     classifiers=CLASSIFIERS, | ||||||
|     install_requires=['pymongo>=3.4', 'six'], |     python_requires=">=3.5", | ||||||
|     test_suite='nose.collector', |     install_requires=["pymongo>=3.4, <4.0"], | ||||||
|  |     cmdclass={"test": PyTest}, | ||||||
|     **extra_opts |     **extra_opts | ||||||
| ) | ) | ||||||
|   | |||||||
| @@ -1,4 +0,0 @@ | |||||||
| from .all_warnings import AllWarnings |  | ||||||
| from .document import * |  | ||||||
| from .queryset import * |  | ||||||
| from .fields import * |  | ||||||
|   | |||||||
| @@ -1,42 +0,0 @@ | |||||||
| """ |  | ||||||
| This test has been put into a module.  This is because it tests warnings that |  | ||||||
| only get triggered on first hit.  This way we can ensure its imported into the |  | ||||||
| top level and called first by the test suite. |  | ||||||
| """ |  | ||||||
| import unittest |  | ||||||
| import warnings |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ('AllWarnings', ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class AllWarnings(unittest.TestCase): |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         connect(db='mongoenginetest') |  | ||||||
|         self.warning_list = [] |  | ||||||
|         self.showwarning_default = warnings.showwarning |  | ||||||
|         warnings.showwarning = self.append_to_warning_list |  | ||||||
|  |  | ||||||
|     def append_to_warning_list(self, message, category, *args): |  | ||||||
|         self.warning_list.append({"message": message, |  | ||||||
|                                   "category": category}) |  | ||||||
|  |  | ||||||
|     def tearDown(self): |  | ||||||
|         # restore default handling of warnings |  | ||||||
|         warnings.showwarning = self.showwarning_default |  | ||||||
|  |  | ||||||
|     def test_document_collection_syntax_warning(self): |  | ||||||
|  |  | ||||||
|         class NonAbstractBase(Document): |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|  |  | ||||||
|         class InheritedDocumentFailTest(NonAbstractBase): |  | ||||||
|             meta = {'collection': 'fail'} |  | ||||||
|  |  | ||||||
|         warning = self.warning_list[0] |  | ||||||
|         self.assertEqual(SyntaxWarning, warning["category"]) |  | ||||||
|         self.assertEqual('non_abstract_base', |  | ||||||
|                          InheritedDocumentFailTest._get_collection_name()) |  | ||||||
|   | |||||||
							
								
								
									
										35
									
								
								tests/all_warnings/test_warnings.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										35
									
								
								tests/all_warnings/test_warnings.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,35 @@ | |||||||
|  | """ | ||||||
|  | This test has been put into a module.  This is because it tests warnings that | ||||||
|  | only get triggered on first hit.  This way we can ensure its imported into the | ||||||
|  | top level and called first by the test suite. | ||||||
|  | """ | ||||||
|  | import unittest | ||||||
|  | import warnings | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestAllWarnings(unittest.TestCase): | ||||||
|  |     def setUp(self): | ||||||
|  |         connect(db="mongoenginetest") | ||||||
|  |         self.warning_list = [] | ||||||
|  |         self.showwarning_default = warnings.showwarning | ||||||
|  |         warnings.showwarning = self.append_to_warning_list | ||||||
|  |  | ||||||
|  |     def append_to_warning_list(self, message, category, *args): | ||||||
|  |         self.warning_list.append({"message": message, "category": category}) | ||||||
|  |  | ||||||
|  |     def tearDown(self): | ||||||
|  |         # restore default handling of warnings | ||||||
|  |         warnings.showwarning = self.showwarning_default | ||||||
|  |  | ||||||
|  |     def test_document_collection_syntax_warning(self): | ||||||
|  |         class NonAbstractBase(Document): | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         class InheritedDocumentFailTest(NonAbstractBase): | ||||||
|  |             meta = {"collection": "fail"} | ||||||
|  |  | ||||||
|  |         warning = self.warning_list[0] | ||||||
|  |         assert SyntaxWarning == warning["category"] | ||||||
|  |         assert "non_abstract_base" == InheritedDocumentFailTest._get_collection_name() | ||||||
| @@ -1,13 +0,0 @@ | |||||||
| import unittest |  | ||||||
|  |  | ||||||
| from .class_methods import * |  | ||||||
| from .delta import * |  | ||||||
| from .dynamic import * |  | ||||||
| from .indexes import * |  | ||||||
| from .inheritance import * |  | ||||||
| from .instance import * |  | ||||||
| from .json_serialisation import * |  | ||||||
| from .validation import * |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     unittest.main() |  | ||||||
|   | |||||||
| @@ -1,864 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import unittest |  | ||||||
|  |  | ||||||
| from bson import SON |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.pymongo_support import list_collection_names |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeltaTest(MongoDBTestCase): |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         super(DeltaTest, self).setUp() |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             age = IntField() |  | ||||||
|  |  | ||||||
|             non_field = True |  | ||||||
|  |  | ||||||
|             meta = {"allow_inheritance": True} |  | ||||||
|  |  | ||||||
|         self.Person = Person |  | ||||||
|  |  | ||||||
|     def tearDown(self): |  | ||||||
|         for collection in list_collection_names(self.db): |  | ||||||
|             self.db.drop_collection(collection) |  | ||||||
|  |  | ||||||
|     def test_delta(self): |  | ||||||
|         self.delta(Document) |  | ||||||
|         self.delta(DynamicDocument) |  | ||||||
|  |  | ||||||
|     def delta(self, DocClass): |  | ||||||
|  |  | ||||||
|         class Doc(DocClass): |  | ||||||
|             string_field = StringField() |  | ||||||
|             int_field = IntField() |  | ||||||
|             dict_field = DictField() |  | ||||||
|             list_field = ListField() |  | ||||||
|  |  | ||||||
|         Doc.drop_collection() |  | ||||||
|         doc = Doc() |  | ||||||
|         doc.save() |  | ||||||
|  |  | ||||||
|         doc = Doc.objects.first() |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), []) |  | ||||||
|         self.assertEqual(doc._delta(), ({}, {})) |  | ||||||
|  |  | ||||||
|         doc.string_field = 'hello' |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), ['string_field']) |  | ||||||
|         self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         doc.int_field = 1 |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), ['int_field']) |  | ||||||
|         self.assertEqual(doc._delta(), ({'int_field': 1}, {})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         dict_value = {'hello': 'world', 'ping': 'pong'} |  | ||||||
|         doc.dict_field = dict_value |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), ['dict_field']) |  | ||||||
|         self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         list_value = ['1', 2, {'hello': 'world'}] |  | ||||||
|         doc.list_field = list_value |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), ['list_field']) |  | ||||||
|         self.assertEqual(doc._delta(), ({'list_field': list_value}, {})) |  | ||||||
|  |  | ||||||
|         # Test unsetting |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         doc.dict_field = {} |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), ['dict_field']) |  | ||||||
|         self.assertEqual(doc._delta(), ({}, {'dict_field': 1})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         doc.list_field = [] |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), ['list_field']) |  | ||||||
|         self.assertEqual(doc._delta(), ({}, {'list_field': 1})) |  | ||||||
|  |  | ||||||
|     def test_delta_recursive(self): |  | ||||||
|         self.delta_recursive(Document, EmbeddedDocument) |  | ||||||
|         self.delta_recursive(DynamicDocument, EmbeddedDocument) |  | ||||||
|         self.delta_recursive(Document, DynamicEmbeddedDocument) |  | ||||||
|         self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument) |  | ||||||
|  |  | ||||||
|     def delta_recursive(self, DocClass, EmbeddedClass): |  | ||||||
|  |  | ||||||
|         class Embedded(EmbeddedClass): |  | ||||||
|             id = StringField() |  | ||||||
|             string_field = StringField() |  | ||||||
|             int_field = IntField() |  | ||||||
|             dict_field = DictField() |  | ||||||
|             list_field = ListField() |  | ||||||
|  |  | ||||||
|         class Doc(DocClass): |  | ||||||
|             string_field = StringField() |  | ||||||
|             int_field = IntField() |  | ||||||
|             dict_field = DictField() |  | ||||||
|             list_field = ListField() |  | ||||||
|             embedded_field = EmbeddedDocumentField(Embedded) |  | ||||||
|  |  | ||||||
|         Doc.drop_collection() |  | ||||||
|         doc = Doc() |  | ||||||
|         doc.save() |  | ||||||
|  |  | ||||||
|         doc = Doc.objects.first() |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), []) |  | ||||||
|         self.assertEqual(doc._delta(), ({}, {})) |  | ||||||
|  |  | ||||||
|         embedded_1 = Embedded() |  | ||||||
|         embedded_1.id = "010101" |  | ||||||
|         embedded_1.string_field = 'hello' |  | ||||||
|         embedded_1.int_field = 1 |  | ||||||
|         embedded_1.dict_field = {'hello': 'world'} |  | ||||||
|         embedded_1.list_field = ['1', 2, {'hello': 'world'}] |  | ||||||
|         doc.embedded_field = embedded_1 |  | ||||||
|  |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), ['embedded_field']) |  | ||||||
|  |  | ||||||
|         embedded_delta = { |  | ||||||
|             'id': "010101", |  | ||||||
|             'string_field': 'hello', |  | ||||||
|             'int_field': 1, |  | ||||||
|             'dict_field': {'hello': 'world'}, |  | ||||||
|             'list_field': ['1', 2, {'hello': 'world'}] |  | ||||||
|         } |  | ||||||
|         self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) |  | ||||||
|         self.assertEqual(doc._delta(), |  | ||||||
|                          ({'embedded_field': embedded_delta}, {})) |  | ||||||
|  |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|  |  | ||||||
|         doc.embedded_field.dict_field = {} |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), |  | ||||||
|                          ['embedded_field.dict_field']) |  | ||||||
|         self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1})) |  | ||||||
|         self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|         self.assertEqual(doc.embedded_field.dict_field, {}) |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field = [] |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), |  | ||||||
|                          ['embedded_field.list_field']) |  | ||||||
|         self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1})) |  | ||||||
|         self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|         self.assertEqual(doc.embedded_field.list_field, []) |  | ||||||
|  |  | ||||||
|         embedded_2 = Embedded() |  | ||||||
|         embedded_2.string_field = 'hello' |  | ||||||
|         embedded_2.int_field = 1 |  | ||||||
|         embedded_2.dict_field = {'hello': 'world'} |  | ||||||
|         embedded_2.list_field = ['1', 2, {'hello': 'world'}] |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field = ['1', 2, embedded_2] |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), |  | ||||||
|                          ['embedded_field.list_field']) |  | ||||||
|  |  | ||||||
|         self.assertEqual(doc.embedded_field._delta(), ({ |  | ||||||
|             'list_field': ['1', 2, { |  | ||||||
|                 '_cls': 'Embedded', |  | ||||||
|                 'string_field': 'hello', |  | ||||||
|                 'dict_field': {'hello': 'world'}, |  | ||||||
|                 'int_field': 1, |  | ||||||
|                 'list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|             }] |  | ||||||
|         }, {})) |  | ||||||
|  |  | ||||||
|         self.assertEqual(doc._delta(), ({ |  | ||||||
|             'embedded_field.list_field': ['1', 2, { |  | ||||||
|                 '_cls': 'Embedded', |  | ||||||
|                 'string_field': 'hello', |  | ||||||
|                 'dict_field': {'hello': 'world'}, |  | ||||||
|                 'int_field': 1, |  | ||||||
|                 'list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|             }] |  | ||||||
|         }, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|  |  | ||||||
|         self.assertEqual(doc.embedded_field.list_field[0], '1') |  | ||||||
|         self.assertEqual(doc.embedded_field.list_field[1], 2) |  | ||||||
|         for k in doc.embedded_field.list_field[2]._fields: |  | ||||||
|             self.assertEqual(doc.embedded_field.list_field[2][k], |  | ||||||
|                              embedded_2[k]) |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].string_field = 'world' |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), |  | ||||||
|                          ['embedded_field.list_field.2.string_field']) |  | ||||||
|         self.assertEqual(doc.embedded_field._delta(), |  | ||||||
|                          ({'list_field.2.string_field': 'world'}, {})) |  | ||||||
|         self.assertEqual(doc._delta(), |  | ||||||
|                          ({'embedded_field.list_field.2.string_field': 'world'}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|         self.assertEqual(doc.embedded_field.list_field[2].string_field, |  | ||||||
|                          'world') |  | ||||||
|  |  | ||||||
|         # Test multiple assignments |  | ||||||
|         doc.embedded_field.list_field[2].string_field = 'hello world' |  | ||||||
|         doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), |  | ||||||
|                          ['embedded_field.list_field.2']) |  | ||||||
|         self.assertEqual(doc.embedded_field._delta(), ({'list_field.2': { |  | ||||||
|                 '_cls': 'Embedded', |  | ||||||
|                 'string_field': 'hello world', |  | ||||||
|                 'int_field': 1, |  | ||||||
|                 'list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|                 'dict_field': {'hello': 'world'}} |  | ||||||
|             }, {})) |  | ||||||
|         self.assertEqual(doc._delta(), ({'embedded_field.list_field.2': { |  | ||||||
|                 '_cls': 'Embedded', |  | ||||||
|                 'string_field': 'hello world', |  | ||||||
|                 'int_field': 1, |  | ||||||
|                 'list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|                 'dict_field': {'hello': 'world'}} |  | ||||||
|             }, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|         self.assertEqual(doc.embedded_field.list_field[2].string_field, |  | ||||||
|                          'hello world') |  | ||||||
|  |  | ||||||
|         # Test list native methods |  | ||||||
|         doc.embedded_field.list_field[2].list_field.pop(0) |  | ||||||
|         self.assertEqual(doc._delta(), |  | ||||||
|                          ({'embedded_field.list_field.2.list_field': |  | ||||||
|                           [2, {'hello': 'world'}]}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].list_field.append(1) |  | ||||||
|         self.assertEqual(doc._delta(), |  | ||||||
|                          ({'embedded_field.list_field.2.list_field': |  | ||||||
|                           [2, {'hello': 'world'}, 1]}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|         self.assertEqual(doc.embedded_field.list_field[2].list_field, |  | ||||||
|                          [2, {'hello': 'world'}, 1]) |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].list_field.sort(key=str) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|         self.assertEqual(doc.embedded_field.list_field[2].list_field, |  | ||||||
|                          [1, 2, {'hello': 'world'}]) |  | ||||||
|  |  | ||||||
|         del doc.embedded_field.list_field[2].list_field[2]['hello'] |  | ||||||
|         self.assertEqual(doc._delta(), |  | ||||||
|                          ({}, {'embedded_field.list_field.2.list_field.2.hello': 1})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|  |  | ||||||
|         del doc.embedded_field.list_field[2].list_field |  | ||||||
|         self.assertEqual(doc._delta(), |  | ||||||
|                          ({}, {'embedded_field.list_field.2.list_field': 1})) |  | ||||||
|  |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|  |  | ||||||
|         doc.dict_field['Embedded'] = embedded_1 |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|  |  | ||||||
|         doc.dict_field['Embedded'].string_field = 'Hello World' |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), |  | ||||||
|                          ['dict_field.Embedded.string_field']) |  | ||||||
|         self.assertEqual(doc._delta(), |  | ||||||
|                          ({'dict_field.Embedded.string_field': 'Hello World'}, {})) |  | ||||||
|  |  | ||||||
|     def test_circular_reference_deltas(self): |  | ||||||
|         self.circular_reference_deltas(Document, Document) |  | ||||||
|         self.circular_reference_deltas(Document, DynamicDocument) |  | ||||||
|         self.circular_reference_deltas(DynamicDocument, Document) |  | ||||||
|         self.circular_reference_deltas(DynamicDocument, DynamicDocument) |  | ||||||
|  |  | ||||||
|     def circular_reference_deltas(self, DocClass1, DocClass2): |  | ||||||
|  |  | ||||||
|         class Person(DocClass1): |  | ||||||
|             name = StringField() |  | ||||||
|             owns = ListField(ReferenceField('Organization')) |  | ||||||
|  |  | ||||||
|         class Organization(DocClass2): |  | ||||||
|             name = StringField() |  | ||||||
|             owner = ReferenceField('Person') |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|         Organization.drop_collection() |  | ||||||
|  |  | ||||||
|         person = Person(name="owner").save() |  | ||||||
|         organization = Organization(name="company").save() |  | ||||||
|  |  | ||||||
|         person.owns.append(organization) |  | ||||||
|         organization.owner = person |  | ||||||
|  |  | ||||||
|         person.save() |  | ||||||
|         organization.save() |  | ||||||
|  |  | ||||||
|         p = Person.objects[0].select_related() |  | ||||||
|         o = Organization.objects.first() |  | ||||||
|         self.assertEqual(p.owns[0], o) |  | ||||||
|         self.assertEqual(o.owner, p) |  | ||||||
|  |  | ||||||
|     def test_circular_reference_deltas_2(self): |  | ||||||
|         self.circular_reference_deltas_2(Document, Document) |  | ||||||
|         self.circular_reference_deltas_2(Document, DynamicDocument) |  | ||||||
|         self.circular_reference_deltas_2(DynamicDocument, Document) |  | ||||||
|         self.circular_reference_deltas_2(DynamicDocument, DynamicDocument) |  | ||||||
|  |  | ||||||
|     def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True): |  | ||||||
|  |  | ||||||
|         class Person(DocClass1): |  | ||||||
|             name = StringField() |  | ||||||
|             owns = ListField(ReferenceField('Organization', dbref=dbref)) |  | ||||||
|             employer = ReferenceField('Organization', dbref=dbref) |  | ||||||
|  |  | ||||||
|         class Organization(DocClass2): |  | ||||||
|             name = StringField() |  | ||||||
|             owner = ReferenceField('Person', dbref=dbref) |  | ||||||
|             employees = ListField(ReferenceField('Person', dbref=dbref)) |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|         Organization.drop_collection() |  | ||||||
|  |  | ||||||
|         person = Person(name="owner").save() |  | ||||||
|         employee = Person(name="employee").save() |  | ||||||
|         organization = Organization(name="company").save() |  | ||||||
|  |  | ||||||
|         person.owns.append(organization) |  | ||||||
|         organization.owner = person |  | ||||||
|  |  | ||||||
|         organization.employees.append(employee) |  | ||||||
|         employee.employer = organization |  | ||||||
|  |  | ||||||
|         person.save() |  | ||||||
|         organization.save() |  | ||||||
|         employee.save() |  | ||||||
|  |  | ||||||
|         p = Person.objects.get(name="owner") |  | ||||||
|         e = Person.objects.get(name="employee") |  | ||||||
|         o = Organization.objects.first() |  | ||||||
|  |  | ||||||
|         self.assertEqual(p.owns[0], o) |  | ||||||
|         self.assertEqual(o.owner, p) |  | ||||||
|         self.assertEqual(e.employer, o) |  | ||||||
|  |  | ||||||
|         return person, organization, employee |  | ||||||
|  |  | ||||||
|     def test_delta_db_field(self): |  | ||||||
|         self.delta_db_field(Document) |  | ||||||
|         self.delta_db_field(DynamicDocument) |  | ||||||
|  |  | ||||||
|     def delta_db_field(self, DocClass): |  | ||||||
|  |  | ||||||
|         class Doc(DocClass): |  | ||||||
|             string_field = StringField(db_field='db_string_field') |  | ||||||
|             int_field = IntField(db_field='db_int_field') |  | ||||||
|             dict_field = DictField(db_field='db_dict_field') |  | ||||||
|             list_field = ListField(db_field='db_list_field') |  | ||||||
|  |  | ||||||
|         Doc.drop_collection() |  | ||||||
|         doc = Doc() |  | ||||||
|         doc.save() |  | ||||||
|  |  | ||||||
|         doc = Doc.objects.first() |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), []) |  | ||||||
|         self.assertEqual(doc._delta(), ({}, {})) |  | ||||||
|  |  | ||||||
|         doc.string_field = 'hello' |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), ['db_string_field']) |  | ||||||
|         self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         doc.int_field = 1 |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), ['db_int_field']) |  | ||||||
|         self.assertEqual(doc._delta(), ({'db_int_field': 1}, {})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         dict_value = {'hello': 'world', 'ping': 'pong'} |  | ||||||
|         doc.dict_field = dict_value |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), ['db_dict_field']) |  | ||||||
|         self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         list_value = ['1', 2, {'hello': 'world'}] |  | ||||||
|         doc.list_field = list_value |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), ['db_list_field']) |  | ||||||
|         self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {})) |  | ||||||
|  |  | ||||||
|         # Test unsetting |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         doc.dict_field = {} |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), ['db_dict_field']) |  | ||||||
|         self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         doc.list_field = [] |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), ['db_list_field']) |  | ||||||
|         self.assertEqual(doc._delta(), ({}, {'db_list_field': 1})) |  | ||||||
|  |  | ||||||
|         # Test it saves that data |  | ||||||
|         doc = Doc() |  | ||||||
|         doc.save() |  | ||||||
|  |  | ||||||
|         doc.string_field = 'hello' |  | ||||||
|         doc.int_field = 1 |  | ||||||
|         doc.dict_field = {'hello': 'world'} |  | ||||||
|         doc.list_field = ['1', 2, {'hello': 'world'}] |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|  |  | ||||||
|         self.assertEqual(doc.string_field, 'hello') |  | ||||||
|         self.assertEqual(doc.int_field, 1) |  | ||||||
|         self.assertEqual(doc.dict_field, {'hello': 'world'}) |  | ||||||
|         self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}]) |  | ||||||
|  |  | ||||||
|     def test_delta_recursive_db_field(self): |  | ||||||
|         self.delta_recursive_db_field(Document, EmbeddedDocument) |  | ||||||
|         self.delta_recursive_db_field(Document, DynamicEmbeddedDocument) |  | ||||||
|         self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument) |  | ||||||
|         self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument) |  | ||||||
|  |  | ||||||
|     def delta_recursive_db_field(self, DocClass, EmbeddedClass): |  | ||||||
|  |  | ||||||
|         class Embedded(EmbeddedClass): |  | ||||||
|             string_field = StringField(db_field='db_string_field') |  | ||||||
|             int_field = IntField(db_field='db_int_field') |  | ||||||
|             dict_field = DictField(db_field='db_dict_field') |  | ||||||
|             list_field = ListField(db_field='db_list_field') |  | ||||||
|  |  | ||||||
|         class Doc(DocClass): |  | ||||||
|             string_field = StringField(db_field='db_string_field') |  | ||||||
|             int_field = IntField(db_field='db_int_field') |  | ||||||
|             dict_field = DictField(db_field='db_dict_field') |  | ||||||
|             list_field = ListField(db_field='db_list_field') |  | ||||||
|             embedded_field = EmbeddedDocumentField(Embedded, |  | ||||||
|                                     db_field='db_embedded_field') |  | ||||||
|  |  | ||||||
|         Doc.drop_collection() |  | ||||||
|         doc = Doc() |  | ||||||
|         doc.save() |  | ||||||
|  |  | ||||||
|         doc = Doc.objects.first() |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), []) |  | ||||||
|         self.assertEqual(doc._delta(), ({}, {})) |  | ||||||
|  |  | ||||||
|         embedded_1 = Embedded() |  | ||||||
|         embedded_1.string_field = 'hello' |  | ||||||
|         embedded_1.int_field = 1 |  | ||||||
|         embedded_1.dict_field = {'hello': 'world'} |  | ||||||
|         embedded_1.list_field = ['1', 2, {'hello': 'world'}] |  | ||||||
|         doc.embedded_field = embedded_1 |  | ||||||
|  |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), ['db_embedded_field']) |  | ||||||
|  |  | ||||||
|         embedded_delta = { |  | ||||||
|             'db_string_field': 'hello', |  | ||||||
|             'db_int_field': 1, |  | ||||||
|             'db_dict_field': {'hello': 'world'}, |  | ||||||
|             'db_list_field': ['1', 2, {'hello': 'world'}] |  | ||||||
|         } |  | ||||||
|         self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) |  | ||||||
|         self.assertEqual(doc._delta(), |  | ||||||
|             ({'db_embedded_field': embedded_delta}, {})) |  | ||||||
|  |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|  |  | ||||||
|         doc.embedded_field.dict_field = {} |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), |  | ||||||
|             ['db_embedded_field.db_dict_field']) |  | ||||||
|         self.assertEqual(doc.embedded_field._delta(), |  | ||||||
|             ({}, {'db_dict_field': 1})) |  | ||||||
|         self.assertEqual(doc._delta(), |  | ||||||
|             ({}, {'db_embedded_field.db_dict_field': 1})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|         self.assertEqual(doc.embedded_field.dict_field, {}) |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field = [] |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), |  | ||||||
|             ['db_embedded_field.db_list_field']) |  | ||||||
|         self.assertEqual(doc.embedded_field._delta(), |  | ||||||
|             ({}, {'db_list_field': 1})) |  | ||||||
|         self.assertEqual(doc._delta(), |  | ||||||
|             ({}, {'db_embedded_field.db_list_field': 1})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|         self.assertEqual(doc.embedded_field.list_field, []) |  | ||||||
|  |  | ||||||
|         embedded_2 = Embedded() |  | ||||||
|         embedded_2.string_field = 'hello' |  | ||||||
|         embedded_2.int_field = 1 |  | ||||||
|         embedded_2.dict_field = {'hello': 'world'} |  | ||||||
|         embedded_2.list_field = ['1', 2, {'hello': 'world'}] |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field = ['1', 2, embedded_2] |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), |  | ||||||
|             ['db_embedded_field.db_list_field']) |  | ||||||
|         self.assertEqual(doc.embedded_field._delta(), ({ |  | ||||||
|             'db_list_field': ['1', 2, { |  | ||||||
|                 '_cls': 'Embedded', |  | ||||||
|                 'db_string_field': 'hello', |  | ||||||
|                 'db_dict_field': {'hello': 'world'}, |  | ||||||
|                 'db_int_field': 1, |  | ||||||
|                 'db_list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|             }] |  | ||||||
|         }, {})) |  | ||||||
|  |  | ||||||
|         self.assertEqual(doc._delta(), ({ |  | ||||||
|             'db_embedded_field.db_list_field': ['1', 2, { |  | ||||||
|                 '_cls': 'Embedded', |  | ||||||
|                 'db_string_field': 'hello', |  | ||||||
|                 'db_dict_field': {'hello': 'world'}, |  | ||||||
|                 'db_int_field': 1, |  | ||||||
|                 'db_list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|             }] |  | ||||||
|         }, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|  |  | ||||||
|         self.assertEqual(doc.embedded_field.list_field[0], '1') |  | ||||||
|         self.assertEqual(doc.embedded_field.list_field[1], 2) |  | ||||||
|         for k in doc.embedded_field.list_field[2]._fields: |  | ||||||
|             self.assertEqual(doc.embedded_field.list_field[2][k], |  | ||||||
|                              embedded_2[k]) |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].string_field = 'world' |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), |  | ||||||
|             ['db_embedded_field.db_list_field.2.db_string_field']) |  | ||||||
|         self.assertEqual(doc.embedded_field._delta(), |  | ||||||
|             ({'db_list_field.2.db_string_field': 'world'}, {})) |  | ||||||
|         self.assertEqual(doc._delta(), |  | ||||||
|             ({'db_embedded_field.db_list_field.2.db_string_field': 'world'}, |  | ||||||
|              {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|         self.assertEqual(doc.embedded_field.list_field[2].string_field, |  | ||||||
|                         'world') |  | ||||||
|  |  | ||||||
|         # Test multiple assignments |  | ||||||
|         doc.embedded_field.list_field[2].string_field = 'hello world' |  | ||||||
|         doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] |  | ||||||
|         self.assertEqual(doc._get_changed_fields(), |  | ||||||
|             ['db_embedded_field.db_list_field.2']) |  | ||||||
|         self.assertEqual(doc.embedded_field._delta(), ({'db_list_field.2': { |  | ||||||
|             '_cls': 'Embedded', |  | ||||||
|             'db_string_field': 'hello world', |  | ||||||
|             'db_int_field': 1, |  | ||||||
|             'db_list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|             'db_dict_field': {'hello': 'world'}}}, {})) |  | ||||||
|         self.assertEqual(doc._delta(), ({ |  | ||||||
|             'db_embedded_field.db_list_field.2': { |  | ||||||
|                 '_cls': 'Embedded', |  | ||||||
|                 'db_string_field': 'hello world', |  | ||||||
|                 'db_int_field': 1, |  | ||||||
|                 'db_list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|                 'db_dict_field': {'hello': 'world'}} |  | ||||||
|             }, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|         self.assertEqual(doc.embedded_field.list_field[2].string_field, |  | ||||||
|                         'hello world') |  | ||||||
|  |  | ||||||
|         # Test list native methods |  | ||||||
|         doc.embedded_field.list_field[2].list_field.pop(0) |  | ||||||
|         self.assertEqual(doc._delta(), |  | ||||||
|             ({'db_embedded_field.db_list_field.2.db_list_field': |  | ||||||
|                 [2, {'hello': 'world'}]}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].list_field.append(1) |  | ||||||
|         self.assertEqual(doc._delta(), |  | ||||||
|             ({'db_embedded_field.db_list_field.2.db_list_field': |  | ||||||
|                 [2, {'hello': 'world'}, 1]}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|         self.assertEqual(doc.embedded_field.list_field[2].list_field, |  | ||||||
|             [2, {'hello': 'world'}, 1]) |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].list_field.sort(key=str) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|         self.assertEqual(doc.embedded_field.list_field[2].list_field, |  | ||||||
|             [1, 2, {'hello': 'world'}]) |  | ||||||
|  |  | ||||||
|         del doc.embedded_field.list_field[2].list_field[2]['hello'] |  | ||||||
|         self.assertEqual(doc._delta(), |  | ||||||
|             ({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1})) |  | ||||||
|         doc.save() |  | ||||||
|         doc = doc.reload(10) |  | ||||||
|  |  | ||||||
|         del doc.embedded_field.list_field[2].list_field |  | ||||||
|         self.assertEqual(doc._delta(), ({}, |  | ||||||
|             {'db_embedded_field.db_list_field.2.db_list_field': 1})) |  | ||||||
|  |  | ||||||
    def test_delta_for_dynamic_documents(self):
        """Delta computation for DynamicDocument instances.

        An unsaved document reports its full SON (declared and dynamic
        fields alike) as the "set" half of the delta; after a save only
        the fields changed since then are reported.
        """
        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        # Unsaved doc: the whole document is the pending "set" delta.
        p = Person(name="James", age=34)
        self.assertEqual(p._delta(), (
            SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

        # Adding then deleting a dynamic field before the first save
        # leaves the delta unchanged (nothing exists in the db to unset).
        p.doc = 123
        del p.doc
        self.assertEqual(p._delta(), (
            SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

        p = Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        # After a save, only the modified dynamic field is reported.
        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        # Same behaviour on a freshly queried document.
        p = Person.objects(age=22).get()
        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p.save()
        self.assertEqual(1, Person.objects(age=24).count())
|  |  | ||||||
    def test_dynamic_delta(self):
        """Deltas for dynamically added fields on a DynamicDocument.

        New values of any type come through as "set" entries; assigning
        an empty dict/list is reported as an "unset" (removal) instead.
        """

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        # A freshly loaded document has no pending changes.
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['string_field'])
        self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))

        # Reset the tracker by hand so each scenario's delta is isolated.
        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['int_field'])
        self.assertEqual(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting: emptying a container becomes a removal, not a set.
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
|  |  | ||||||
|     def test_delta_with_dbref_true(self): |  | ||||||
|         person, organization, employee = self.circular_reference_deltas_2(Document, Document, True) |  | ||||||
|         employee.name = 'test' |  | ||||||
|  |  | ||||||
|         self.assertEqual(organization._get_changed_fields(), []) |  | ||||||
|  |  | ||||||
|         updates, removals = organization._delta() |  | ||||||
|         self.assertEqual({}, removals) |  | ||||||
|         self.assertEqual({}, updates) |  | ||||||
|  |  | ||||||
|         organization.employees.append(person) |  | ||||||
|         updates, removals = organization._delta() |  | ||||||
|         self.assertEqual({}, removals) |  | ||||||
|         self.assertIn('employees', updates) |  | ||||||
|  |  | ||||||
|     def test_delta_with_dbref_false(self): |  | ||||||
|         person, organization, employee = self.circular_reference_deltas_2(Document, Document, False) |  | ||||||
|         employee.name = 'test' |  | ||||||
|  |  | ||||||
|         self.assertEqual(organization._get_changed_fields(), []) |  | ||||||
|  |  | ||||||
|         updates, removals = organization._delta() |  | ||||||
|         self.assertEqual({}, removals) |  | ||||||
|         self.assertEqual({}, updates) |  | ||||||
|  |  | ||||||
|         organization.employees.append(person) |  | ||||||
|         updates, removals = organization._delta() |  | ||||||
|         self.assertEqual({}, removals) |  | ||||||
|         self.assertIn('employees', updates) |  | ||||||
|  |  | ||||||
|     def test_nested_nested_fields_mark_as_changed(self): |  | ||||||
|         class EmbeddedDoc(EmbeddedDocument): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class MyDoc(Document): |  | ||||||
|             subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc))) |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         MyDoc.drop_collection() |  | ||||||
|  |  | ||||||
|         mydoc = MyDoc(name='testcase1', subs={'a': {'b': EmbeddedDoc(name='foo')}}).save() |  | ||||||
|  |  | ||||||
|         mydoc = MyDoc.objects.first() |  | ||||||
|         subdoc = mydoc.subs['a']['b'] |  | ||||||
|         subdoc.name = 'bar' |  | ||||||
|  |  | ||||||
|         self.assertEqual(["name"], subdoc._get_changed_fields()) |  | ||||||
|         self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields()) |  | ||||||
|  |  | ||||||
|         mydoc._clear_changed_fields() |  | ||||||
|         self.assertEqual([], mydoc._get_changed_fields()) |  | ||||||
|  |  | ||||||
|     def test_lower_level_mark_as_changed(self): |  | ||||||
|         class EmbeddedDoc(EmbeddedDocument): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class MyDoc(Document): |  | ||||||
|             subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) |  | ||||||
|  |  | ||||||
|         MyDoc.drop_collection() |  | ||||||
|  |  | ||||||
|         MyDoc().save() |  | ||||||
|  |  | ||||||
|         mydoc = MyDoc.objects.first() |  | ||||||
|         mydoc.subs['a'] = EmbeddedDoc() |  | ||||||
|         self.assertEqual(["subs.a"], mydoc._get_changed_fields()) |  | ||||||
|  |  | ||||||
|         subdoc = mydoc.subs['a'] |  | ||||||
|         subdoc.name = 'bar' |  | ||||||
|  |  | ||||||
|         self.assertEqual(["name"], subdoc._get_changed_fields()) |  | ||||||
|         self.assertEqual(["subs.a"], mydoc._get_changed_fields()) |  | ||||||
|         mydoc.save() |  | ||||||
|  |  | ||||||
|         mydoc._clear_changed_fields() |  | ||||||
|         self.assertEqual([], mydoc._get_changed_fields()) |  | ||||||
|  |  | ||||||
|     def test_upper_level_mark_as_changed(self): |  | ||||||
|         class EmbeddedDoc(EmbeddedDocument): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class MyDoc(Document): |  | ||||||
|             subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) |  | ||||||
|  |  | ||||||
|         MyDoc.drop_collection() |  | ||||||
|  |  | ||||||
|         MyDoc(subs={'a': EmbeddedDoc(name='foo')}).save() |  | ||||||
|  |  | ||||||
|         mydoc = MyDoc.objects.first() |  | ||||||
|         subdoc = mydoc.subs['a'] |  | ||||||
|         subdoc.name = 'bar' |  | ||||||
|  |  | ||||||
|         self.assertEqual(["name"], subdoc._get_changed_fields()) |  | ||||||
|         self.assertEqual(["subs.a.name"], mydoc._get_changed_fields()) |  | ||||||
|  |  | ||||||
|         mydoc.subs['a'] = EmbeddedDoc() |  | ||||||
|         self.assertEqual(["subs.a"], mydoc._get_changed_fields()) |  | ||||||
|         mydoc.save() |  | ||||||
|  |  | ||||||
|         mydoc._clear_changed_fields() |  | ||||||
|         self.assertEqual([], mydoc._get_changed_fields()) |  | ||||||
|  |  | ||||||
|     def test_referenced_object_changed_attributes(self): |  | ||||||
|         """Ensures that when you save a new reference to a field, the referenced object isn't altered""" |  | ||||||
|  |  | ||||||
|         class Organization(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class User(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             org = ReferenceField('Organization', required=True) |  | ||||||
|  |  | ||||||
|         Organization.drop_collection() |  | ||||||
|         User.drop_collection() |  | ||||||
|  |  | ||||||
|         org1 = Organization(name='Org 1') |  | ||||||
|         org1.save() |  | ||||||
|  |  | ||||||
|         org2 = Organization(name='Org 2') |  | ||||||
|         org2.save() |  | ||||||
|  |  | ||||||
|         user = User(name='Fred', org=org1) |  | ||||||
|         user.save() |  | ||||||
|  |  | ||||||
|         org1.reload() |  | ||||||
|         org2.reload() |  | ||||||
|         user.reload() |  | ||||||
|         self.assertEqual(org1.name, 'Org 1') |  | ||||||
|         self.assertEqual(org2.name, 'Org 2') |  | ||||||
|         self.assertEqual(user.name, 'Fred') |  | ||||||
|  |  | ||||||
|         user.name = 'Harold' |  | ||||||
|         user.org = org2 |  | ||||||
|  |  | ||||||
|         org2.name = 'New Org 2' |  | ||||||
|         self.assertEqual(org2.name, 'New Org 2') |  | ||||||
|  |  | ||||||
|         user.save() |  | ||||||
|         org2.save() |  | ||||||
|  |  | ||||||
|         self.assertEqual(org2.name, 'New Org 2') |  | ||||||
|         org2.reload() |  | ||||||
|         self.assertEqual(org2.name, 'New Org 2') |  | ||||||
|  |  | ||||||
|     def test_delta_for_nested_map_fields(self): |  | ||||||
|         class UInfoDocument(Document): |  | ||||||
|             phone = StringField() |  | ||||||
|  |  | ||||||
|         class EmbeddedRole(EmbeddedDocument): |  | ||||||
|             type = StringField() |  | ||||||
|  |  | ||||||
|         class EmbeddedUser(EmbeddedDocument): |  | ||||||
|             name = StringField() |  | ||||||
|             roles = MapField(field=EmbeddedDocumentField(EmbeddedRole)) |  | ||||||
|             rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole)) |  | ||||||
|             info = ReferenceField(UInfoDocument) |  | ||||||
|  |  | ||||||
|         class Doc(Document): |  | ||||||
|             users = MapField(field=EmbeddedDocumentField(EmbeddedUser)) |  | ||||||
|             num = IntField(default=-1) |  | ||||||
|  |  | ||||||
|         Doc.drop_collection() |  | ||||||
|  |  | ||||||
|         doc = Doc(num=1) |  | ||||||
|         doc.users["007"] = EmbeddedUser(name="Agent007") |  | ||||||
|         doc.save() |  | ||||||
|  |  | ||||||
|         uinfo = UInfoDocument(phone="79089269066") |  | ||||||
|         uinfo.save() |  | ||||||
|  |  | ||||||
|         d = Doc.objects(num=1).first() |  | ||||||
|         d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin") |  | ||||||
|         d.users["007"]["rolist"].append(EmbeddedRole(type="oops")) |  | ||||||
|         d.users["007"]["info"] = uinfo |  | ||||||
|         delta = d._delta() |  | ||||||
|         self.assertEqual(True, "users.007.roles.666" in delta[0]) |  | ||||||
|         self.assertEqual(True, "users.007.rolist" in delta[0]) |  | ||||||
|         self.assertEqual(True, "users.007.info" in delta[0]) |  | ||||||
|         self.assertEqual('superadmin', delta[0]["users.007.roles.666"]["type"]) |  | ||||||
|         self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"]) |  | ||||||
|         self.assertEqual(uinfo.id, delta[0]["users.007.info"]) |  | ||||||
|  |  | ||||||
|  |  | ||||||
# Allow running this test module directly (python <file>).
if __name__ == '__main__':
    unittest.main()
| @@ -1,564 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import unittest |  | ||||||
| import warnings |  | ||||||
|  |  | ||||||
| from six import iteritems |  | ||||||
|  |  | ||||||
| from mongoengine import (BooleanField, Document, EmbeddedDocument, |  | ||||||
|                          EmbeddedDocumentField, GenericReferenceField, |  | ||||||
|                          IntField, ReferenceField, StringField) |  | ||||||
| from mongoengine.pymongo_support import list_collection_names |  | ||||||
| from tests.utils import MongoDBTestCase |  | ||||||
| from tests.fixtures import Base |  | ||||||
|  |  | ||||||
| __all__ = ('InheritanceTest', ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class InheritanceTest(MongoDBTestCase): |  | ||||||
|  |  | ||||||
|     def tearDown(self): |  | ||||||
|         for collection in list_collection_names(self.db): |  | ||||||
|             self.db.drop_collection(collection) |  | ||||||
|  |  | ||||||
|     def test_constructor_cls(self): |  | ||||||
|         # Ensures _cls is properly set during construction |  | ||||||
|         # and when object gets reloaded (prevent regression of #1950) |  | ||||||
|         class EmbedData(EmbeddedDocument): |  | ||||||
|             data = StringField() |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|  |  | ||||||
|         class DataDoc(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             embed = EmbeddedDocumentField(EmbedData) |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|  |  | ||||||
|         test_doc = DataDoc(name='test', embed=EmbedData(data='data')) |  | ||||||
|         self.assertEqual(test_doc._cls, 'DataDoc') |  | ||||||
|         self.assertEqual(test_doc.embed._cls, 'EmbedData') |  | ||||||
|         test_doc.save() |  | ||||||
|         saved_doc = DataDoc.objects.with_id(test_doc.id) |  | ||||||
|         self.assertEqual(test_doc._cls, saved_doc._cls) |  | ||||||
|         self.assertEqual(test_doc.embed._cls, saved_doc.embed._cls) |  | ||||||
|         test_doc.delete() |  | ||||||
|  |  | ||||||
|     def test_superclasses(self): |  | ||||||
|         """Ensure that the correct list of superclasses is assembled. |  | ||||||
|         """ |  | ||||||
|         class Animal(Document): |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|         class Fish(Animal): pass |  | ||||||
|         class Guppy(Fish): pass |  | ||||||
|         class Mammal(Animal): pass |  | ||||||
|         class Dog(Mammal): pass |  | ||||||
|         class Human(Mammal): pass |  | ||||||
|  |  | ||||||
|         self.assertEqual(Animal._superclasses, ()) |  | ||||||
|         self.assertEqual(Fish._superclasses, ('Animal',)) |  | ||||||
|         self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish')) |  | ||||||
|         self.assertEqual(Mammal._superclasses, ('Animal',)) |  | ||||||
|         self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal')) |  | ||||||
|         self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal')) |  | ||||||
|  |  | ||||||
|     def test_external_superclasses(self): |  | ||||||
|         """Ensure that the correct list of super classes is assembled when |  | ||||||
|         importing part of the model. |  | ||||||
|         """ |  | ||||||
|         class Animal(Base): pass |  | ||||||
|         class Fish(Animal): pass |  | ||||||
|         class Guppy(Fish): pass |  | ||||||
|         class Mammal(Animal): pass |  | ||||||
|         class Dog(Mammal): pass |  | ||||||
|         class Human(Mammal): pass |  | ||||||
|  |  | ||||||
|         self.assertEqual(Animal._superclasses, ('Base', )) |  | ||||||
|         self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',)) |  | ||||||
|         self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal', |  | ||||||
|                                                'Base.Animal.Fish')) |  | ||||||
|         self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',)) |  | ||||||
|         self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal', |  | ||||||
|                                              'Base.Animal.Mammal')) |  | ||||||
|         self.assertEqual(Human._superclasses, ('Base', 'Base.Animal', |  | ||||||
|                                                'Base.Animal.Mammal')) |  | ||||||
|  |  | ||||||
|     def test_subclasses(self): |  | ||||||
|         """Ensure that the correct list of _subclasses (subclasses) is |  | ||||||
|         assembled. |  | ||||||
|         """ |  | ||||||
|         class Animal(Document): |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|         class Fish(Animal): pass |  | ||||||
|         class Guppy(Fish): pass |  | ||||||
|         class Mammal(Animal): pass |  | ||||||
|         class Dog(Mammal): pass |  | ||||||
|         class Human(Mammal): pass |  | ||||||
|  |  | ||||||
|         self.assertEqual(Animal._subclasses, ('Animal', |  | ||||||
|                                          'Animal.Fish', |  | ||||||
|                                          'Animal.Fish.Guppy', |  | ||||||
|                                          'Animal.Mammal', |  | ||||||
|                                          'Animal.Mammal.Dog', |  | ||||||
|                                          'Animal.Mammal.Human')) |  | ||||||
|         self.assertEqual(Fish._subclasses, ('Animal.Fish', |  | ||||||
|                                        'Animal.Fish.Guppy',)) |  | ||||||
|         self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',)) |  | ||||||
|         self.assertEqual(Mammal._subclasses, ('Animal.Mammal', |  | ||||||
|                                          'Animal.Mammal.Dog', |  | ||||||
|                                          'Animal.Mammal.Human')) |  | ||||||
|         self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',)) |  | ||||||
|  |  | ||||||
|     def test_external_subclasses(self): |  | ||||||
|         """Ensure that the correct list of _subclasses (subclasses) is |  | ||||||
|         assembled when importing part of the model. |  | ||||||
|         """ |  | ||||||
|         class Animal(Base): pass |  | ||||||
|         class Fish(Animal): pass |  | ||||||
|         class Guppy(Fish): pass |  | ||||||
|         class Mammal(Animal): pass |  | ||||||
|         class Dog(Mammal): pass |  | ||||||
|         class Human(Mammal): pass |  | ||||||
|  |  | ||||||
|         self.assertEqual(Animal._subclasses, ('Base.Animal', |  | ||||||
|                                               'Base.Animal.Fish', |  | ||||||
|                                               'Base.Animal.Fish.Guppy', |  | ||||||
|                                               'Base.Animal.Mammal', |  | ||||||
|                                               'Base.Animal.Mammal.Dog', |  | ||||||
|                                               'Base.Animal.Mammal.Human')) |  | ||||||
|         self.assertEqual(Fish._subclasses, ('Base.Animal.Fish', |  | ||||||
|                                             'Base.Animal.Fish.Guppy',)) |  | ||||||
|         self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',)) |  | ||||||
|         self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal', |  | ||||||
|                                               'Base.Animal.Mammal.Dog', |  | ||||||
|                                               'Base.Animal.Mammal.Human')) |  | ||||||
|         self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',)) |  | ||||||
|  |  | ||||||
|     def test_dynamic_declarations(self): |  | ||||||
|         """Test that declaring an extra class updates meta data""" |  | ||||||
|  |  | ||||||
|         class Animal(Document): |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|  |  | ||||||
|         self.assertEqual(Animal._superclasses, ()) |  | ||||||
|         self.assertEqual(Animal._subclasses, ('Animal',)) |  | ||||||
|  |  | ||||||
|         # Test dynamically adding a class changes the meta data |  | ||||||
|         class Fish(Animal): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         self.assertEqual(Animal._superclasses, ()) |  | ||||||
|         self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish')) |  | ||||||
|  |  | ||||||
|         self.assertEqual(Fish._superclasses, ('Animal', )) |  | ||||||
|         self.assertEqual(Fish._subclasses, ('Animal.Fish',)) |  | ||||||
|  |  | ||||||
|         # Test dynamically adding an inherited class changes the meta data |  | ||||||
|         class Pike(Fish): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         self.assertEqual(Animal._superclasses, ()) |  | ||||||
|         self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish', |  | ||||||
|                                               'Animal.Fish.Pike')) |  | ||||||
|  |  | ||||||
|         self.assertEqual(Fish._superclasses, ('Animal', )) |  | ||||||
|         self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike')) |  | ||||||
|  |  | ||||||
|         self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish')) |  | ||||||
|         self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',)) |  | ||||||
|  |  | ||||||
|     def test_inheritance_meta_data(self): |  | ||||||
|         """Ensure that document may inherit fields from a superclass document. |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             age = IntField() |  | ||||||
|  |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|  |  | ||||||
|         class Employee(Person): |  | ||||||
|             salary = IntField() |  | ||||||
|  |  | ||||||
|         self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'], |  | ||||||
|                          sorted(Employee._fields.keys())) |  | ||||||
|         self.assertEqual(Employee._get_collection_name(), |  | ||||||
|                          Person._get_collection_name()) |  | ||||||
|  |  | ||||||
|     def test_inheritance_to_mongo_keys(self): |  | ||||||
|         """Ensure that document may inherit fields from a superclass document. |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             age = IntField() |  | ||||||
|  |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|  |  | ||||||
|         class Employee(Person): |  | ||||||
|             salary = IntField() |  | ||||||
|  |  | ||||||
|         self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'], |  | ||||||
|                          sorted(Employee._fields.keys())) |  | ||||||
|         self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(), |  | ||||||
|                          ['_cls', 'name', 'age']) |  | ||||||
|         self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(), |  | ||||||
|                          ['_cls', 'name', 'age', 'salary']) |  | ||||||
|         self.assertEqual(Employee._get_collection_name(), |  | ||||||
|                          Person._get_collection_name()) |  | ||||||
|  |  | ||||||
|     def test_indexes_and_multiple_inheritance(self): |  | ||||||
|         """ Ensure that all of the indexes are created for a document with |  | ||||||
|         multiple inheritance. |  | ||||||
|         """ |  | ||||||
|  |  | ||||||
|         class A(Document): |  | ||||||
|             a = StringField() |  | ||||||
|  |  | ||||||
|             meta = { |  | ||||||
|                 'allow_inheritance': True, |  | ||||||
|                 'indexes': ['a'] |  | ||||||
|             } |  | ||||||
|  |  | ||||||
|         class B(Document): |  | ||||||
|             b = StringField() |  | ||||||
|  |  | ||||||
|             meta = { |  | ||||||
|                 'allow_inheritance': True, |  | ||||||
|                 'indexes': ['b'] |  | ||||||
|             } |  | ||||||
|  |  | ||||||
|         class C(A, B): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         A.drop_collection() |  | ||||||
|         B.drop_collection() |  | ||||||
|         C.drop_collection() |  | ||||||
|  |  | ||||||
|         C.ensure_indexes() |  | ||||||
|  |  | ||||||
|         self.assertEqual( |  | ||||||
|             sorted([idx['key'] for idx in C._get_collection().index_information().values()]), |  | ||||||
|             sorted([[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]]) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_polymorphic_queries(self): |  | ||||||
|         """Ensure that the correct subclasses are returned from a query |  | ||||||
|         """ |  | ||||||
|  |  | ||||||
|         class Animal(Document): |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|         class Fish(Animal): pass |  | ||||||
|         class Mammal(Animal): pass |  | ||||||
|         class Dog(Mammal): pass |  | ||||||
|         class Human(Mammal): pass |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |  | ||||||
|  |  | ||||||
|         Animal().save() |  | ||||||
|         Fish().save() |  | ||||||
|         Mammal().save() |  | ||||||
|         Dog().save() |  | ||||||
|         Human().save() |  | ||||||
|  |  | ||||||
|         classes = [obj.__class__ for obj in Animal.objects] |  | ||||||
|         self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human]) |  | ||||||
|  |  | ||||||
|         classes = [obj.__class__ for obj in Mammal.objects] |  | ||||||
|         self.assertEqual(classes, [Mammal, Dog, Human]) |  | ||||||
|  |  | ||||||
|         classes = [obj.__class__ for obj in Human.objects] |  | ||||||
|         self.assertEqual(classes, [Human]) |  | ||||||
|  |  | ||||||
|     def test_allow_inheritance(self): |  | ||||||
|         """Ensure that inheritance is disabled by default on simple |  | ||||||
|         classes and that _cls will not be used. |  | ||||||
|         """ |  | ||||||
|         class Animal(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         # can't inherit because Animal didn't explicitly allow inheritance |  | ||||||
|         with self.assertRaises(ValueError) as cm: |  | ||||||
|             class Dog(Animal): |  | ||||||
|                 pass |  | ||||||
|         self.assertIn("Document Animal may not be subclassed", str(cm.exception)) |  | ||||||
|  |  | ||||||
|         # Check that _cls etc aren't present on simple documents |  | ||||||
|         dog = Animal(name='dog').save() |  | ||||||
|         self.assertEqual(dog.to_mongo().keys(), ['_id', 'name']) |  | ||||||
|  |  | ||||||
|         collection = self.db[Animal._get_collection_name()] |  | ||||||
|         obj = collection.find_one() |  | ||||||
|         self.assertNotIn('_cls', obj) |  | ||||||
|  |  | ||||||
|     def test_cant_turn_off_inheritance_on_subclass(self): |  | ||||||
|         """Ensure if inheritance is on in a subclass you cant turn it off. |  | ||||||
|         """ |  | ||||||
|         class Animal(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|  |  | ||||||
|         with self.assertRaises(ValueError) as cm: |  | ||||||
|             class Mammal(Animal): |  | ||||||
|                 meta = {'allow_inheritance': False} |  | ||||||
|         self.assertEqual(str(cm.exception), 'Only direct subclasses of Document may set "allow_inheritance" to False') |  | ||||||
|  |  | ||||||
|     def test_allow_inheritance_abstract_document(self): |  | ||||||
|         """Ensure that abstract documents can set inheritance rules and that |  | ||||||
|         _cls will not be used. |  | ||||||
|         """ |  | ||||||
|         class FinalDocument(Document): |  | ||||||
|             meta = {'abstract': True, |  | ||||||
|                     'allow_inheritance': False} |  | ||||||
|  |  | ||||||
|         class Animal(FinalDocument): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         with self.assertRaises(ValueError) as cm: |  | ||||||
|             class Mammal(Animal): |  | ||||||
|                 pass |  | ||||||
|  |  | ||||||
|         # Check that _cls isn't present in simple documents |  | ||||||
|         doc = Animal(name='dog') |  | ||||||
|         self.assertNotIn('_cls', doc.to_mongo()) |  | ||||||
|  |  | ||||||
|     def test_using_abstract_class_in_reference_field(self): |  | ||||||
|         # Ensures no regression of #1920 |  | ||||||
|         class AbstractHuman(Document): |  | ||||||
|             meta = {'abstract': True} |  | ||||||
|  |  | ||||||
|         class Dad(AbstractHuman): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Home(Document): |  | ||||||
|             dad = ReferenceField(AbstractHuman)  # Referencing the abstract class |  | ||||||
|             address = StringField() |  | ||||||
|  |  | ||||||
|         dad = Dad(name='5').save() |  | ||||||
|         Home(dad=dad, address='street').save() |  | ||||||
|  |  | ||||||
|         home = Home.objects.first() |  | ||||||
|         home.address = 'garbage' |  | ||||||
|         home.save()     # Was failing with ValidationError |  | ||||||
|  |  | ||||||
|     def test_abstract_class_referencing_self(self): |  | ||||||
|         # Ensures no regression of #1920 |  | ||||||
|         class Human(Document): |  | ||||||
|             meta = {'abstract': True} |  | ||||||
|             creator = ReferenceField('self', dbref=True) |  | ||||||
|  |  | ||||||
|         class User(Human): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         user = User(name='John').save() |  | ||||||
|         user2 = User(name='Foo', creator=user).save() |  | ||||||
|  |  | ||||||
|         user2 = User.objects.with_id(user2.id) |  | ||||||
|         user2.name = 'Bar' |  | ||||||
|         user2.save()    # Was failing with ValidationError |  | ||||||
|  |  | ||||||
|     def test_abstract_handle_ids_in_metaclass_properly(self): |  | ||||||
|  |  | ||||||
|         class City(Document): |  | ||||||
|             continent = StringField() |  | ||||||
|             meta = {'abstract': True, |  | ||||||
|                     'allow_inheritance': False} |  | ||||||
|  |  | ||||||
|         class EuropeanCity(City): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         berlin = EuropeanCity(name='Berlin', continent='Europe') |  | ||||||
|         self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) |  | ||||||
|         self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) |  | ||||||
|         self.assertEqual(len(berlin._fields_ordered), 3) |  | ||||||
|         self.assertEqual(berlin._fields_ordered[0], 'id') |  | ||||||
|  |  | ||||||
|     def test_auto_id_not_set_if_specific_in_parent_class(self): |  | ||||||
|  |  | ||||||
|         class City(Document): |  | ||||||
|             continent = StringField() |  | ||||||
|             city_id = IntField(primary_key=True) |  | ||||||
|             meta = {'abstract': True, |  | ||||||
|                     'allow_inheritance': False} |  | ||||||
|  |  | ||||||
|         class EuropeanCity(City): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         berlin = EuropeanCity(name='Berlin', continent='Europe') |  | ||||||
|         self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) |  | ||||||
|         self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) |  | ||||||
|         self.assertEqual(len(berlin._fields_ordered), 3) |  | ||||||
|         self.assertEqual(berlin._fields_ordered[0], 'city_id') |  | ||||||
|  |  | ||||||
|     def test_auto_id_vs_non_pk_id_field(self): |  | ||||||
|  |  | ||||||
|         class City(Document): |  | ||||||
|             continent = StringField() |  | ||||||
|             id = IntField() |  | ||||||
|             meta = {'abstract': True, |  | ||||||
|                     'allow_inheritance': False} |  | ||||||
|  |  | ||||||
|         class EuropeanCity(City): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         berlin = EuropeanCity(name='Berlin', continent='Europe') |  | ||||||
|         self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) |  | ||||||
|         self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) |  | ||||||
|         self.assertEqual(len(berlin._fields_ordered), 4) |  | ||||||
|         self.assertEqual(berlin._fields_ordered[0], 'auto_id_0') |  | ||||||
|         berlin.save() |  | ||||||
|         self.assertEqual(berlin.pk, berlin.auto_id_0) |  | ||||||
|  |  | ||||||
|     def test_abstract_document_creation_does_not_fail(self): |  | ||||||
|         class City(Document): |  | ||||||
|             continent = StringField() |  | ||||||
|             meta = {'abstract': True, |  | ||||||
|                     'allow_inheritance': False} |  | ||||||
|  |  | ||||||
|         city = City(continent='asia') |  | ||||||
|         self.assertEqual(None, city.pk) |  | ||||||
|         # TODO: expected error? Shouldn't we create a new error type? |  | ||||||
|         with self.assertRaises(KeyError): |  | ||||||
|             setattr(city, 'pk', 1) |  | ||||||
|  |  | ||||||
|     def test_allow_inheritance_embedded_document(self): |  | ||||||
|         """Ensure embedded documents respect inheritance.""" |  | ||||||
|         class Comment(EmbeddedDocument): |  | ||||||
|             content = StringField() |  | ||||||
|  |  | ||||||
|         with self.assertRaises(ValueError): |  | ||||||
|             class SpecialComment(Comment): |  | ||||||
|                 pass |  | ||||||
|  |  | ||||||
|         doc = Comment(content='test') |  | ||||||
|         self.assertNotIn('_cls', doc.to_mongo()) |  | ||||||
|  |  | ||||||
|         class Comment(EmbeddedDocument): |  | ||||||
|             content = StringField() |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|  |  | ||||||
|         doc = Comment(content='test') |  | ||||||
|         self.assertIn('_cls', doc.to_mongo()) |  | ||||||
|  |  | ||||||
|     def test_document_inheritance(self): |  | ||||||
|         """Ensure mutliple inheritance of abstract documents |  | ||||||
|         """ |  | ||||||
|         class DateCreatedDocument(Document): |  | ||||||
|             meta = { |  | ||||||
|                 'allow_inheritance': True, |  | ||||||
|                 'abstract': True, |  | ||||||
|             } |  | ||||||
|  |  | ||||||
|         class DateUpdatedDocument(Document): |  | ||||||
|             meta = { |  | ||||||
|                 'allow_inheritance': True, |  | ||||||
|                 'abstract': True, |  | ||||||
|             } |  | ||||||
|  |  | ||||||
|         try: |  | ||||||
|             class MyDocument(DateCreatedDocument, DateUpdatedDocument): |  | ||||||
|                 pass |  | ||||||
|         except Exception: |  | ||||||
|             self.assertTrue(False, "Couldn't create MyDocument class") |  | ||||||
|  |  | ||||||
|     def test_abstract_documents(self): |  | ||||||
|         """Ensure that a document superclass can be marked as abstract |  | ||||||
|         thereby not using it as the name for the collection.""" |  | ||||||
|  |  | ||||||
|         defaults = {'index_background': True, |  | ||||||
|                     'index_drop_dups': True, |  | ||||||
|                     'index_opts': {'hello': 'world'}, |  | ||||||
|                     'allow_inheritance': True, |  | ||||||
|                     'queryset_class': 'QuerySet', |  | ||||||
|                     'db_alias': 'myDB', |  | ||||||
|                     'shard_key': ('hello', 'world')} |  | ||||||
|  |  | ||||||
|         meta_settings = {'abstract': True} |  | ||||||
|         meta_settings.update(defaults) |  | ||||||
|  |  | ||||||
|         class Animal(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             meta = meta_settings |  | ||||||
|  |  | ||||||
|         class Fish(Animal): pass |  | ||||||
|         class Guppy(Fish): pass |  | ||||||
|  |  | ||||||
|         class Mammal(Animal): |  | ||||||
|             meta = {'abstract': True} |  | ||||||
|         class Human(Mammal): pass |  | ||||||
|  |  | ||||||
|         for k, v in iteritems(defaults): |  | ||||||
|             for cls in [Animal, Fish, Guppy]: |  | ||||||
|                 self.assertEqual(cls._meta[k], v) |  | ||||||
|  |  | ||||||
|         self.assertNotIn('collection', Animal._meta) |  | ||||||
|         self.assertNotIn('collection', Mammal._meta) |  | ||||||
|  |  | ||||||
|         self.assertEqual(Animal._get_collection_name(), None) |  | ||||||
|         self.assertEqual(Mammal._get_collection_name(), None) |  | ||||||
|  |  | ||||||
|         self.assertEqual(Fish._get_collection_name(), 'fish') |  | ||||||
|         self.assertEqual(Guppy._get_collection_name(), 'fish') |  | ||||||
|         self.assertEqual(Human._get_collection_name(), 'human') |  | ||||||
|  |  | ||||||
|         # ensure that a subclass of a non-abstract class can't be abstract |  | ||||||
|         with self.assertRaises(ValueError): |  | ||||||
|             class EvilHuman(Human): |  | ||||||
|                 evil = BooleanField(default=True) |  | ||||||
|                 meta = {'abstract': True} |  | ||||||
|  |  | ||||||
|     def test_abstract_embedded_documents(self): |  | ||||||
|         # 789: EmbeddedDocument shouldn't inherit abstract |  | ||||||
|         class A(EmbeddedDocument): |  | ||||||
|             meta = {"abstract": True} |  | ||||||
|  |  | ||||||
|         class B(A): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         self.assertFalse(B._meta["abstract"]) |  | ||||||
|  |  | ||||||
    def test_inherited_collections(self):
        """Ensure that subclassed documents don't override parents'
        collections
        """

        class Drink(Document):
            name = StringField()
            meta = {'allow_inheritance': True}

        class Drinker(Document):
            drink = GenericReferenceField()

        # Escalate warnings to errors so that defining a subclass with its
        # own 'collection' meta (which emits a SyntaxWarning) raises here.
        # Note the misspelled class name never binds: the definition raises
        # before completing, and the correctly-named class is created in the
        # except branch with warnings ignored.
        try:
            warnings.simplefilter("error")

            class AcloholicDrink(Drink):
                meta = {'collection': 'booze'}

        except SyntaxWarning:
            # Expected path: suppress the warning and define the class for real.
            warnings.simplefilter("ignore")

            class AlcoholicDrink(Drink):
                meta = {'collection': 'booze'}

        else:
            # If no warning fired, the inheritance/collection check regressed.
            raise AssertionError("SyntaxWarning should be triggered")

        warnings.resetwarnings()

        Drink.drop_collection()
        AlcoholicDrink.drop_collection()
        Drinker.drop_collection()

        # Save one document of each class and reference both generically.
        red_bull = Drink(name='Red Bull')
        red_bull.save()

        programmer = Drinker(drink=red_bull)
        programmer.save()

        beer = AlcoholicDrink(name='Beer')
        beer.save()
        real_person = Drinker(drink=beer)
        real_person.save()

        # Both references dereference to the correct concrete documents.
        self.assertEqual(Drinker.objects[0].drink.name, red_bull.name)
        self.assertEqual(Drinker.objects[1].drink.name, beer.name)
|  |  | ||||||
|  |  | ||||||
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    unittest.main()
| @@ -2,18 +2,14 @@ | |||||||
| import unittest | import unittest | ||||||
| 
 | 
 | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.pymongo_support import list_collection_names |  | ||||||
| 
 |  | ||||||
| from mongoengine.queryset import NULLIFY, PULL |  | ||||||
| from mongoengine.connection import get_db | from mongoengine.connection import get_db | ||||||
| 
 | from mongoengine.pymongo_support import list_collection_names | ||||||
| __all__ = ("ClassMethodsTest", ) | from mongoengine.queryset import NULLIFY, PULL | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class ClassMethodsTest(unittest.TestCase): | class TestClassMethods(unittest.TestCase): | ||||||
| 
 |  | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
|         connect(db='mongoenginetest') |         connect(db="mongoenginetest") | ||||||
|         self.db = get_db() |         self.db = get_db() | ||||||
| 
 | 
 | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
| @@ -33,54 +29,53 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|     def test_definition(self): |     def test_definition(self): | ||||||
|         """Ensure that document may be defined using fields. |         """Ensure that document may be defined using fields. | ||||||
|         """ |         """ | ||||||
|         self.assertEqual(['_cls', 'age', 'id', 'name'], |         assert ["_cls", "age", "id", "name"] == sorted(self.Person._fields.keys()) | ||||||
|                          sorted(self.Person._fields.keys())) |         assert ["IntField", "ObjectIdField", "StringField", "StringField"] == sorted( | ||||||
|         self.assertEqual(["IntField", "ObjectIdField", "StringField", "StringField"], |             [x.__class__.__name__ for x in self.Person._fields.values()] | ||||||
|                         sorted([x.__class__.__name__ for x in |         ) | ||||||
|                                 self.Person._fields.values()])) |  | ||||||
| 
 | 
 | ||||||
|     def test_get_db(self): |     def test_get_db(self): | ||||||
|         """Ensure that get_db returns the expected db. |         """Ensure that get_db returns the expected db. | ||||||
|         """ |         """ | ||||||
|         db = self.Person._get_db() |         db = self.Person._get_db() | ||||||
|         self.assertEqual(self.db, db) |         assert self.db == db | ||||||
| 
 | 
 | ||||||
|     def test_get_collection_name(self): |     def test_get_collection_name(self): | ||||||
|         """Ensure that get_collection_name returns the expected collection |         """Ensure that get_collection_name returns the expected collection | ||||||
|         name. |         name. | ||||||
|         """ |         """ | ||||||
|         collection_name = 'person' |         collection_name = "person" | ||||||
|         self.assertEqual(collection_name, self.Person._get_collection_name()) |         assert collection_name == self.Person._get_collection_name() | ||||||
| 
 | 
 | ||||||
|     def test_get_collection(self): |     def test_get_collection(self): | ||||||
|         """Ensure that get_collection returns the expected collection. |         """Ensure that get_collection returns the expected collection. | ||||||
|         """ |         """ | ||||||
|         collection_name = 'person' |         collection_name = "person" | ||||||
|         collection = self.Person._get_collection() |         collection = self.Person._get_collection() | ||||||
|         self.assertEqual(self.db[collection_name], collection) |         assert self.db[collection_name] == collection | ||||||
| 
 | 
 | ||||||
|     def test_drop_collection(self): |     def test_drop_collection(self): | ||||||
|         """Ensure that the collection may be dropped from the database. |         """Ensure that the collection may be dropped from the database. | ||||||
|         """ |         """ | ||||||
|         collection_name = 'person' |         collection_name = "person" | ||||||
|         self.Person(name='Test').save() |         self.Person(name="Test").save() | ||||||
|         self.assertIn(collection_name, list_collection_names(self.db)) |         assert collection_name in list_collection_names(self.db) | ||||||
| 
 | 
 | ||||||
|         self.Person.drop_collection() |         self.Person.drop_collection() | ||||||
|         self.assertNotIn(collection_name, list_collection_names(self.db)) |         assert collection_name not in list_collection_names(self.db) | ||||||
| 
 | 
 | ||||||
|     def test_register_delete_rule(self): |     def test_register_delete_rule(self): | ||||||
|         """Ensure that register delete rule adds a delete rule to the document |         """Ensure that register delete rule adds a delete rule to the document | ||||||
|         meta. |         meta. | ||||||
|         """ |         """ | ||||||
|  | 
 | ||||||
|         class Job(Document): |         class Job(Document): | ||||||
|             employee = ReferenceField(self.Person) |             employee = ReferenceField(self.Person) | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(self.Person._meta.get('delete_rules'), None) |         assert self.Person._meta.get("delete_rules") is None | ||||||
| 
 | 
 | ||||||
|         self.Person.register_delete_rule(Job, 'employee', NULLIFY) |         self.Person.register_delete_rule(Job, "employee", NULLIFY) | ||||||
|         self.assertEqual(self.Person._meta['delete_rules'], |         assert self.Person._meta["delete_rules"] == {(Job, "employee"): NULLIFY} | ||||||
|                          {(Job, 'employee'): NULLIFY}) |  | ||||||
| 
 | 
 | ||||||
|     def test_compare_indexes(self): |     def test_compare_indexes(self): | ||||||
|         """ Ensure that the indexes are properly created and that |         """ Ensure that the indexes are properly created and that | ||||||
| @@ -93,23 +88,27 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|             description = StringField() |             description = StringField() | ||||||
|             tags = StringField() |             tags = StringField() | ||||||
| 
 | 
 | ||||||
|             meta = { |             meta = {"indexes": [("author", "title")]} | ||||||
|                 'indexes': [('author', 'title')] |  | ||||||
|             } |  | ||||||
| 
 | 
 | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
| 
 | 
 | ||||||
|         BlogPost.ensure_indexes() |         BlogPost.ensure_indexes() | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) |         assert BlogPost.compare_indexes() == {"missing": [], "extra": []} | ||||||
| 
 | 
 | ||||||
|         BlogPost.ensure_index(['author', 'description']) |         BlogPost.ensure_index(["author", "description"]) | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('author', 1), ('description', 1)]]}) |         assert BlogPost.compare_indexes() == { | ||||||
|  |             "missing": [], | ||||||
|  |             "extra": [[("author", 1), ("description", 1)]], | ||||||
|  |         } | ||||||
| 
 | 
 | ||||||
|         BlogPost._get_collection().drop_index('author_1_description_1') |         BlogPost._get_collection().drop_index("author_1_description_1") | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) |         assert BlogPost.compare_indexes() == {"missing": [], "extra": []} | ||||||
| 
 | 
 | ||||||
|         BlogPost._get_collection().drop_index('author_1_title_1') |         BlogPost._get_collection().drop_index("author_1_title_1") | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('author', 1), ('title', 1)]], 'extra': []}) |         assert BlogPost.compare_indexes() == { | ||||||
|  |             "missing": [[("author", 1), ("title", 1)]], | ||||||
|  |             "extra": [], | ||||||
|  |         } | ||||||
| 
 | 
 | ||||||
|     def test_compare_indexes_inheritance(self): |     def test_compare_indexes_inheritance(self): | ||||||
|         """ Ensure that the indexes are properly created and that |         """ Ensure that the indexes are properly created and that | ||||||
| @@ -122,32 +121,34 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|             title = StringField() |             title = StringField() | ||||||
|             description = StringField() |             description = StringField() | ||||||
| 
 | 
 | ||||||
|             meta = { |             meta = {"allow_inheritance": True} | ||||||
|                 'allow_inheritance': True |  | ||||||
|             } |  | ||||||
| 
 | 
 | ||||||
|         class BlogPostWithTags(BlogPost): |         class BlogPostWithTags(BlogPost): | ||||||
|             tags = StringField() |             tags = StringField() | ||||||
|             tag_list = ListField(StringField()) |             tag_list = ListField(StringField()) | ||||||
| 
 | 
 | ||||||
|             meta = { |             meta = {"indexes": [("author", "tags")]} | ||||||
|                 'indexes': [('author', 'tags')] |  | ||||||
|             } |  | ||||||
| 
 | 
 | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
| 
 | 
 | ||||||
|         BlogPost.ensure_indexes() |         BlogPost.ensure_indexes() | ||||||
|         BlogPostWithTags.ensure_indexes() |         BlogPostWithTags.ensure_indexes() | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) |         assert BlogPost.compare_indexes() == {"missing": [], "extra": []} | ||||||
| 
 | 
 | ||||||
|         BlogPostWithTags.ensure_index(['author', 'tag_list']) |         BlogPostWithTags.ensure_index(["author", "tag_list"]) | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]]}) |         assert BlogPost.compare_indexes() == { | ||||||
|  |             "missing": [], | ||||||
|  |             "extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]], | ||||||
|  |         } | ||||||
| 
 | 
 | ||||||
|         BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1') |         BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tag_list_1") | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) |         assert BlogPost.compare_indexes() == {"missing": [], "extra": []} | ||||||
| 
 | 
 | ||||||
|         BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1') |         BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tags_1") | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': []}) |         assert BlogPost.compare_indexes() == { | ||||||
|  |             "missing": [[("_cls", 1), ("author", 1), ("tags", 1)]], | ||||||
|  |             "extra": [], | ||||||
|  |         } | ||||||
| 
 | 
 | ||||||
|     def test_compare_indexes_multiple_subclasses(self): |     def test_compare_indexes_multiple_subclasses(self): | ||||||
|         """ Ensure that compare_indexes behaves correctly if called from a |         """ Ensure that compare_indexes behaves correctly if called from a | ||||||
| @@ -159,32 +160,26 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|             title = StringField() |             title = StringField() | ||||||
|             description = StringField() |             description = StringField() | ||||||
| 
 | 
 | ||||||
|             meta = { |             meta = {"allow_inheritance": True} | ||||||
|                 'allow_inheritance': True |  | ||||||
|             } |  | ||||||
| 
 | 
 | ||||||
|         class BlogPostWithTags(BlogPost): |         class BlogPostWithTags(BlogPost): | ||||||
|             tags = StringField() |             tags = StringField() | ||||||
|             tag_list = ListField(StringField()) |             tag_list = ListField(StringField()) | ||||||
| 
 | 
 | ||||||
|             meta = { |             meta = {"indexes": [("author", "tags")]} | ||||||
|                 'indexes': [('author', 'tags')] |  | ||||||
|             } |  | ||||||
| 
 | 
 | ||||||
|         class BlogPostWithCustomField(BlogPost): |         class BlogPostWithCustomField(BlogPost): | ||||||
|             custom = DictField() |             custom = DictField() | ||||||
| 
 | 
 | ||||||
|             meta = { |             meta = {"indexes": [("author", "custom")]} | ||||||
|                 'indexes': [('author', 'custom')] |  | ||||||
|             } |  | ||||||
| 
 | 
 | ||||||
|         BlogPost.ensure_indexes() |         BlogPost.ensure_indexes() | ||||||
|         BlogPostWithTags.ensure_indexes() |         BlogPostWithTags.ensure_indexes() | ||||||
|         BlogPostWithCustomField.ensure_indexes() |         BlogPostWithCustomField.ensure_indexes() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) |         assert BlogPost.compare_indexes() == {"missing": [], "extra": []} | ||||||
|         self.assertEqual(BlogPostWithTags.compare_indexes(), {'missing': [], 'extra': []}) |         assert BlogPostWithTags.compare_indexes() == {"missing": [], "extra": []} | ||||||
|         self.assertEqual(BlogPostWithCustomField.compare_indexes(), {'missing': [], 'extra': []}) |         assert BlogPostWithCustomField.compare_indexes() == {"missing": [], "extra": []} | ||||||
| 
 | 
 | ||||||
|     def test_compare_indexes_for_text_indexes(self): |     def test_compare_indexes_for_text_indexes(self): | ||||||
|         """ Ensure that compare_indexes behaves correctly for text indexes """ |         """ Ensure that compare_indexes behaves correctly for text indexes """ | ||||||
| @@ -192,18 +187,21 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|         class Doc(Document): |         class Doc(Document): | ||||||
|             a = StringField() |             a = StringField() | ||||||
|             b = StringField() |             b = StringField() | ||||||
|             meta = {'indexes': [ |             meta = { | ||||||
|                 {'fields': ['$a', "$b"], |                 "indexes": [ | ||||||
|                  'default_language': 'english', |                     { | ||||||
|                  'weights': {'a': 10, 'b': 2} |                         "fields": ["$a", "$b"], | ||||||
|                 } |                         "default_language": "english", | ||||||
|             ]} |                         "weights": {"a": 10, "b": 2}, | ||||||
|  |                     } | ||||||
|  |                 ] | ||||||
|  |             } | ||||||
| 
 | 
 | ||||||
|         Doc.drop_collection() |         Doc.drop_collection() | ||||||
|         Doc.ensure_indexes() |         Doc.ensure_indexes() | ||||||
|         actual = Doc.compare_indexes() |         actual = Doc.compare_indexes() | ||||||
|         expected = {'missing': [], 'extra': []} |         expected = {"missing": [], "extra": []} | ||||||
|         self.assertEqual(actual, expected) |         assert actual == expected | ||||||
| 
 | 
 | ||||||
|     def test_list_indexes_inheritance(self): |     def test_list_indexes_inheritance(self): | ||||||
|         """ ensure that all of the indexes are listed regardless of the super- |         """ ensure that all of the indexes are listed regardless of the super- | ||||||
| @@ -215,23 +213,17 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|             title = StringField() |             title = StringField() | ||||||
|             description = StringField() |             description = StringField() | ||||||
| 
 | 
 | ||||||
|             meta = { |             meta = {"allow_inheritance": True} | ||||||
|                 'allow_inheritance': True |  | ||||||
|             } |  | ||||||
| 
 | 
 | ||||||
|         class BlogPostWithTags(BlogPost): |         class BlogPostWithTags(BlogPost): | ||||||
|             tags = StringField() |             tags = StringField() | ||||||
| 
 | 
 | ||||||
|             meta = { |             meta = {"indexes": [("author", "tags")]} | ||||||
|                 'indexes': [('author', 'tags')] |  | ||||||
|             } |  | ||||||
| 
 | 
 | ||||||
|         class BlogPostWithTagsAndExtraText(BlogPostWithTags): |         class BlogPostWithTagsAndExtraText(BlogPostWithTags): | ||||||
|             extra_text = StringField() |             extra_text = StringField() | ||||||
| 
 | 
 | ||||||
|             meta = { |             meta = {"indexes": [("author", "tags", "extra_text")]} | ||||||
|                 'indexes': [('author', 'tags', 'extra_text')] |  | ||||||
|             } |  | ||||||
| 
 | 
 | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
| 
 | 
 | ||||||
| @@ -239,17 +231,16 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|         BlogPostWithTags.ensure_indexes() |         BlogPostWithTags.ensure_indexes() | ||||||
|         BlogPostWithTagsAndExtraText.ensure_indexes() |         BlogPostWithTagsAndExtraText.ensure_indexes() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(BlogPost.list_indexes(), |         assert BlogPost.list_indexes() == BlogPostWithTags.list_indexes() | ||||||
|                          BlogPostWithTags.list_indexes()) |         assert BlogPost.list_indexes() == BlogPostWithTagsAndExtraText.list_indexes() | ||||||
|         self.assertEqual(BlogPost.list_indexes(), |         assert BlogPost.list_indexes() == [ | ||||||
|                          BlogPostWithTagsAndExtraText.list_indexes()) |             [("_cls", 1), ("author", 1), ("tags", 1)], | ||||||
|         self.assertEqual(BlogPost.list_indexes(), |             [("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)], | ||||||
|                          [[('_cls', 1), ('author', 1), ('tags', 1)], |             [(u"_id", 1)], | ||||||
|                          [('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)], |             [("_cls", 1)], | ||||||
|                          [(u'_id', 1)], [('_cls', 1)]]) |         ] | ||||||
| 
 | 
 | ||||||
|     def test_register_delete_rule_inherited(self): |     def test_register_delete_rule_inherited(self): | ||||||
| 
 |  | ||||||
|         class Vaccine(Document): |         class Vaccine(Document): | ||||||
|             name = StringField(required=True) |             name = StringField(required=True) | ||||||
| 
 | 
 | ||||||
| @@ -257,15 +248,17 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
|             family = StringField(required=True) |             family = StringField(required=True) | ||||||
|             vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL)) |             vaccine_made = ListField( | ||||||
|  |                 ReferenceField("Vaccine", reverse_delete_rule=PULL) | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|             meta = {"allow_inheritance": True, "indexes": ["family"]} |             meta = {"allow_inheritance": True, "indexes": ["family"]} | ||||||
| 
 | 
 | ||||||
|         class Cat(Animal): |         class Cat(Animal): | ||||||
|             name = StringField(required=True) |             name = StringField(required=True) | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL) |         assert Vaccine._meta["delete_rules"][(Animal, "vaccine_made")] == PULL | ||||||
|         self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL) |         assert Vaccine._meta["delete_rules"][(Cat, "vaccine_made")] == PULL | ||||||
| 
 | 
 | ||||||
|     def test_collection_naming(self): |     def test_collection_naming(self): | ||||||
|         """Ensure that a collection with a specified name may be used. |         """Ensure that a collection with a specified name may be used. | ||||||
| @@ -273,80 +266,76 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         class DefaultNamingTest(Document): |         class DefaultNamingTest(Document): | ||||||
|             pass |             pass | ||||||
|         self.assertEqual('default_naming_test', | 
 | ||||||
|                          DefaultNamingTest._get_collection_name()) |         assert "default_naming_test" == DefaultNamingTest._get_collection_name() | ||||||
| 
 | 
 | ||||||
|         class CustomNamingTest(Document): |         class CustomNamingTest(Document): | ||||||
|             meta = {'collection': 'pimp_my_collection'} |             meta = {"collection": "pimp_my_collection"} | ||||||
| 
 | 
 | ||||||
|         self.assertEqual('pimp_my_collection', |         assert "pimp_my_collection" == CustomNamingTest._get_collection_name() | ||||||
|                          CustomNamingTest._get_collection_name()) |  | ||||||
| 
 | 
 | ||||||
|         class DynamicNamingTest(Document): |         class DynamicNamingTest(Document): | ||||||
|             meta = {'collection': lambda c: "DYNAMO"} |             meta = {"collection": lambda c: "DYNAMO"} | ||||||
|         self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name()) | 
 | ||||||
|  |         assert "DYNAMO" == DynamicNamingTest._get_collection_name() | ||||||
| 
 | 
 | ||||||
|         # Use Abstract class to handle backwards compatibility |         # Use Abstract class to handle backwards compatibility | ||||||
|         class BaseDocument(Document): |         class BaseDocument(Document): | ||||||
|             meta = { |             meta = {"abstract": True, "collection": lambda c: c.__name__.lower()} | ||||||
|                 'abstract': True, |  | ||||||
|                 'collection': lambda c: c.__name__.lower() |  | ||||||
|             } |  | ||||||
| 
 | 
 | ||||||
|         class OldNamingConvention(BaseDocument): |         class OldNamingConvention(BaseDocument): | ||||||
|             pass |             pass | ||||||
|         self.assertEqual('oldnamingconvention', | 
 | ||||||
|                          OldNamingConvention._get_collection_name()) |         assert "oldnamingconvention" == OldNamingConvention._get_collection_name() | ||||||
| 
 | 
 | ||||||
|         class InheritedAbstractNamingTest(BaseDocument): |         class InheritedAbstractNamingTest(BaseDocument): | ||||||
|             meta = {'collection': 'wibble'} |             meta = {"collection": "wibble"} | ||||||
|         self.assertEqual('wibble', | 
 | ||||||
|                          InheritedAbstractNamingTest._get_collection_name()) |         assert "wibble" == InheritedAbstractNamingTest._get_collection_name() | ||||||
| 
 | 
 | ||||||
|         # Mixin tests |         # Mixin tests | ||||||
|         class BaseMixin(object): |         class BaseMixin(object): | ||||||
|             meta = { |             meta = {"collection": lambda c: c.__name__.lower()} | ||||||
|                 'collection': lambda c: c.__name__.lower() |  | ||||||
|             } |  | ||||||
| 
 | 
 | ||||||
|         class OldMixinNamingConvention(Document, BaseMixin): |         class OldMixinNamingConvention(Document, BaseMixin): | ||||||
|             pass |             pass | ||||||
|         self.assertEqual('oldmixinnamingconvention', | 
 | ||||||
|                           OldMixinNamingConvention._get_collection_name()) |         assert ( | ||||||
|  |             "oldmixinnamingconvention" | ||||||
|  |             == OldMixinNamingConvention._get_collection_name() | ||||||
|  |         ) | ||||||
| 
 | 
 | ||||||
|         class BaseMixin(object): |         class BaseMixin(object): | ||||||
|             meta = { |             meta = {"collection": lambda c: c.__name__.lower()} | ||||||
|                 'collection': lambda c: c.__name__.lower() |  | ||||||
|             } |  | ||||||
| 
 | 
 | ||||||
|         class BaseDocument(Document, BaseMixin): |         class BaseDocument(Document, BaseMixin): | ||||||
|             meta = {'allow_inheritance': True} |             meta = {"allow_inheritance": True} | ||||||
| 
 | 
 | ||||||
|         class MyDocument(BaseDocument): |         class MyDocument(BaseDocument): | ||||||
|             pass |             pass | ||||||
| 
 | 
 | ||||||
|         self.assertEqual('basedocument', MyDocument._get_collection_name()) |         assert "basedocument" == MyDocument._get_collection_name() | ||||||
| 
 | 
 | ||||||
|     def test_custom_collection_name_operations(self): |     def test_custom_collection_name_operations(self): | ||||||
|         """Ensure that a collection with a specified name is used as expected. |         """Ensure that a collection with a specified name is used as expected. | ||||||
|         """ |         """ | ||||||
|         collection_name = 'personCollTest' |         collection_name = "personCollTest" | ||||||
| 
 | 
 | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             meta = {'collection': collection_name} |             meta = {"collection": collection_name} | ||||||
| 
 | 
 | ||||||
|         Person(name="Test User").save() |         Person(name="Test User").save() | ||||||
|         self.assertIn(collection_name, list_collection_names(self.db)) |         assert collection_name in list_collection_names(self.db) | ||||||
| 
 | 
 | ||||||
|         user_obj = self.db[collection_name].find_one() |         user_obj = self.db[collection_name].find_one() | ||||||
|         self.assertEqual(user_obj['name'], "Test User") |         assert user_obj["name"] == "Test User" | ||||||
| 
 | 
 | ||||||
|         user_obj = Person.objects[0] |         user_obj = Person.objects[0] | ||||||
|         self.assertEqual(user_obj.name, "Test User") |         assert user_obj.name == "Test User" | ||||||
| 
 | 
 | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|         self.assertNotIn(collection_name, list_collection_names(self.db)) |         assert collection_name not in list_collection_names(self.db) | ||||||
| 
 | 
 | ||||||
|     def test_collection_name_and_primary(self): |     def test_collection_name_and_primary(self): | ||||||
|         """Ensure that a collection with a specified name may be used. |         """Ensure that a collection with a specified name may be used. | ||||||
| @@ -354,15 +343,15 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             name = StringField(primary_key=True) |             name = StringField(primary_key=True) | ||||||
|             meta = {'collection': 'app'} |             meta = {"collection": "app"} | ||||||
| 
 | 
 | ||||||
|         Person(name="Test User").save() |         Person(name="Test User").save() | ||||||
| 
 | 
 | ||||||
|         user_obj = Person.objects.first() |         user_obj = Person.objects.first() | ||||||
|         self.assertEqual(user_obj.name, "Test User") |         assert user_obj.name == "Test User" | ||||||
| 
 | 
 | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| if __name__ == '__main__': | if __name__ == "__main__": | ||||||
|     unittest.main() |     unittest.main() | ||||||
							
								
								
									
										916
									
								
								tests/document/test_delta.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										916
									
								
								tests/document/test_delta.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,916 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | import unittest | ||||||
|  |  | ||||||
|  | from bson import SON | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.pymongo_support import list_collection_names | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestDelta(MongoDBTestCase): | ||||||
|  |     def setUp(self): | ||||||
|  |         super(TestDelta, self).setUp() | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             age = IntField() | ||||||
|  |  | ||||||
|  |             non_field = True | ||||||
|  |  | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         self.Person = Person | ||||||
|  |  | ||||||
|  |     def tearDown(self): | ||||||
|  |         for collection in list_collection_names(self.db): | ||||||
|  |             self.db.drop_collection(collection) | ||||||
|  |  | ||||||
|  |     def test_delta(self): | ||||||
|  |         self.delta(Document) | ||||||
|  |         self.delta(DynamicDocument) | ||||||
|  |  | ||||||
|  |     def delta(self, DocClass): | ||||||
|  |         class Doc(DocClass): | ||||||
|  |             string_field = StringField() | ||||||
|  |             int_field = IntField() | ||||||
|  |             dict_field = DictField() | ||||||
|  |             list_field = ListField() | ||||||
|  |  | ||||||
|  |         Doc.drop_collection() | ||||||
|  |         doc = Doc() | ||||||
|  |         doc.save() | ||||||
|  |  | ||||||
|  |         doc = Doc.objects.first() | ||||||
|  |         assert doc._get_changed_fields() == [] | ||||||
|  |         assert doc._delta() == ({}, {}) | ||||||
|  |  | ||||||
|  |         doc.string_field = "hello" | ||||||
|  |         assert doc._get_changed_fields() == ["string_field"] | ||||||
|  |         assert doc._delta() == ({"string_field": "hello"}, {}) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         doc.int_field = 1 | ||||||
|  |         assert doc._get_changed_fields() == ["int_field"] | ||||||
|  |         assert doc._delta() == ({"int_field": 1}, {}) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         dict_value = {"hello": "world", "ping": "pong"} | ||||||
|  |         doc.dict_field = dict_value | ||||||
|  |         assert doc._get_changed_fields() == ["dict_field"] | ||||||
|  |         assert doc._delta() == ({"dict_field": dict_value}, {}) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         list_value = ["1", 2, {"hello": "world"}] | ||||||
|  |         doc.list_field = list_value | ||||||
|  |         assert doc._get_changed_fields() == ["list_field"] | ||||||
|  |         assert doc._delta() == ({"list_field": list_value}, {}) | ||||||
|  |  | ||||||
|  |         # Test unsetting | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         doc.dict_field = {} | ||||||
|  |         assert doc._get_changed_fields() == ["dict_field"] | ||||||
|  |         assert doc._delta() == ({}, {"dict_field": 1}) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         doc.list_field = [] | ||||||
|  |         assert doc._get_changed_fields() == ["list_field"] | ||||||
|  |         assert doc._delta() == ({}, {"list_field": 1}) | ||||||
|  |  | ||||||
|  |     def test_delta_recursive(self): | ||||||
|  |         self.delta_recursive(Document, EmbeddedDocument) | ||||||
|  |         self.delta_recursive(DynamicDocument, EmbeddedDocument) | ||||||
|  |         self.delta_recursive(Document, DynamicEmbeddedDocument) | ||||||
|  |         self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument) | ||||||
|  |  | ||||||
|  |     def delta_recursive(self, DocClass, EmbeddedClass): | ||||||
|  |         class Embedded(EmbeddedClass): | ||||||
|  |             id = StringField() | ||||||
|  |             string_field = StringField() | ||||||
|  |             int_field = IntField() | ||||||
|  |             dict_field = DictField() | ||||||
|  |             list_field = ListField() | ||||||
|  |  | ||||||
|  |         class Doc(DocClass): | ||||||
|  |             string_field = StringField() | ||||||
|  |             int_field = IntField() | ||||||
|  |             dict_field = DictField() | ||||||
|  |             list_field = ListField() | ||||||
|  |             embedded_field = EmbeddedDocumentField(Embedded) | ||||||
|  |  | ||||||
|  |         Doc.drop_collection() | ||||||
|  |         doc = Doc() | ||||||
|  |         doc.save() | ||||||
|  |  | ||||||
|  |         doc = Doc.objects.first() | ||||||
|  |         assert doc._get_changed_fields() == [] | ||||||
|  |         assert doc._delta() == ({}, {}) | ||||||
|  |  | ||||||
|  |         embedded_1 = Embedded() | ||||||
|  |         embedded_1.id = "010101" | ||||||
|  |         embedded_1.string_field = "hello" | ||||||
|  |         embedded_1.int_field = 1 | ||||||
|  |         embedded_1.dict_field = {"hello": "world"} | ||||||
|  |         embedded_1.list_field = ["1", 2, {"hello": "world"}] | ||||||
|  |         doc.embedded_field = embedded_1 | ||||||
|  |  | ||||||
|  |         assert doc._get_changed_fields() == ["embedded_field"] | ||||||
|  |  | ||||||
|  |         embedded_delta = { | ||||||
|  |             "id": "010101", | ||||||
|  |             "string_field": "hello", | ||||||
|  |             "int_field": 1, | ||||||
|  |             "dict_field": {"hello": "world"}, | ||||||
|  |             "list_field": ["1", 2, {"hello": "world"}], | ||||||
|  |         } | ||||||
|  |         assert doc.embedded_field._delta() == (embedded_delta, {}) | ||||||
|  |         assert doc._delta() == ({"embedded_field": embedded_delta}, {}) | ||||||
|  |  | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |  | ||||||
|  |         doc.embedded_field.dict_field = {} | ||||||
|  |         assert doc._get_changed_fields() == ["embedded_field.dict_field"] | ||||||
|  |         assert doc.embedded_field._delta() == ({}, {"dict_field": 1}) | ||||||
|  |         assert doc._delta() == ({}, {"embedded_field.dict_field": 1}) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |         assert doc.embedded_field.dict_field == {} | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field = [] | ||||||
|  |         assert doc._get_changed_fields() == ["embedded_field.list_field"] | ||||||
|  |         assert doc.embedded_field._delta() == ({}, {"list_field": 1}) | ||||||
|  |         assert doc._delta() == ({}, {"embedded_field.list_field": 1}) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |         assert doc.embedded_field.list_field == [] | ||||||
|  |  | ||||||
|  |         embedded_2 = Embedded() | ||||||
|  |         embedded_2.string_field = "hello" | ||||||
|  |         embedded_2.int_field = 1 | ||||||
|  |         embedded_2.dict_field = {"hello": "world"} | ||||||
|  |         embedded_2.list_field = ["1", 2, {"hello": "world"}] | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field = ["1", 2, embedded_2] | ||||||
|  |         assert doc._get_changed_fields() == ["embedded_field.list_field"] | ||||||
|  |  | ||||||
|  |         assert doc.embedded_field._delta() == ( | ||||||
|  |             { | ||||||
|  |                 "list_field": [ | ||||||
|  |                     "1", | ||||||
|  |                     2, | ||||||
|  |                     { | ||||||
|  |                         "_cls": "Embedded", | ||||||
|  |                         "string_field": "hello", | ||||||
|  |                         "dict_field": {"hello": "world"}, | ||||||
|  |                         "int_field": 1, | ||||||
|  |                         "list_field": ["1", 2, {"hello": "world"}], | ||||||
|  |                     }, | ||||||
|  |                 ] | ||||||
|  |             }, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         assert doc._delta() == ( | ||||||
|  |             { | ||||||
|  |                 "embedded_field.list_field": [ | ||||||
|  |                     "1", | ||||||
|  |                     2, | ||||||
|  |                     { | ||||||
|  |                         "_cls": "Embedded", | ||||||
|  |                         "string_field": "hello", | ||||||
|  |                         "dict_field": {"hello": "world"}, | ||||||
|  |                         "int_field": 1, | ||||||
|  |                         "list_field": ["1", 2, {"hello": "world"}], | ||||||
|  |                     }, | ||||||
|  |                 ] | ||||||
|  |             }, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |  | ||||||
|  |         assert doc.embedded_field.list_field[0] == "1" | ||||||
|  |         assert doc.embedded_field.list_field[1] == 2 | ||||||
|  |         for k in doc.embedded_field.list_field[2]._fields: | ||||||
|  |             assert doc.embedded_field.list_field[2][k] == embedded_2[k] | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field[2].string_field = "world" | ||||||
|  |         assert doc._get_changed_fields() == ["embedded_field.list_field.2.string_field"] | ||||||
|  |         assert doc.embedded_field._delta() == ( | ||||||
|  |             {"list_field.2.string_field": "world"}, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         assert doc._delta() == ( | ||||||
|  |             {"embedded_field.list_field.2.string_field": "world"}, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |         assert doc.embedded_field.list_field[2].string_field == "world" | ||||||
|  |  | ||||||
|  |         # Test multiple assignments | ||||||
|  |         doc.embedded_field.list_field[2].string_field = "hello world" | ||||||
|  |         doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] | ||||||
|  |         assert doc._get_changed_fields() == ["embedded_field.list_field.2"] | ||||||
|  |         assert doc.embedded_field._delta() == ( | ||||||
|  |             { | ||||||
|  |                 "list_field.2": { | ||||||
|  |                     "_cls": "Embedded", | ||||||
|  |                     "string_field": "hello world", | ||||||
|  |                     "int_field": 1, | ||||||
|  |                     "list_field": ["1", 2, {"hello": "world"}], | ||||||
|  |                     "dict_field": {"hello": "world"}, | ||||||
|  |                 } | ||||||
|  |             }, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         assert doc._delta() == ( | ||||||
|  |             { | ||||||
|  |                 "embedded_field.list_field.2": { | ||||||
|  |                     "_cls": "Embedded", | ||||||
|  |                     "string_field": "hello world", | ||||||
|  |                     "int_field": 1, | ||||||
|  |                     "list_field": ["1", 2, {"hello": "world"}], | ||||||
|  |                     "dict_field": {"hello": "world"}, | ||||||
|  |                 } | ||||||
|  |             }, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |         assert doc.embedded_field.list_field[2].string_field == "hello world" | ||||||
|  |  | ||||||
|  |         # Test list native methods | ||||||
|  |         doc.embedded_field.list_field[2].list_field.pop(0) | ||||||
|  |         assert doc._delta() == ( | ||||||
|  |             {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}]}, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field[2].list_field.append(1) | ||||||
|  |         assert doc._delta() == ( | ||||||
|  |             {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}, 1]}, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |         assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1] | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field[2].list_field.sort(key=str) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |         assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}] | ||||||
|  |  | ||||||
|  |         del doc.embedded_field.list_field[2].list_field[2]["hello"] | ||||||
|  |         assert doc._delta() == ( | ||||||
|  |             {}, | ||||||
|  |             {"embedded_field.list_field.2.list_field.2.hello": 1}, | ||||||
|  |         ) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |  | ||||||
|  |         del doc.embedded_field.list_field[2].list_field | ||||||
|  |         assert doc._delta() == ({}, {"embedded_field.list_field.2.list_field": 1}) | ||||||
|  |  | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |  | ||||||
|  |         doc.dict_field["Embedded"] = embedded_1 | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |  | ||||||
|  |         doc.dict_field["Embedded"].string_field = "Hello World" | ||||||
|  |         assert doc._get_changed_fields() == ["dict_field.Embedded.string_field"] | ||||||
|  |         assert doc._delta() == ({"dict_field.Embedded.string_field": "Hello World"}, {}) | ||||||
|  |  | ||||||
|  |     def test_circular_reference_deltas(self): | ||||||
|  |         self.circular_reference_deltas(Document, Document) | ||||||
|  |         self.circular_reference_deltas(Document, DynamicDocument) | ||||||
|  |         self.circular_reference_deltas(DynamicDocument, Document) | ||||||
|  |         self.circular_reference_deltas(DynamicDocument, DynamicDocument) | ||||||
|  |  | ||||||
|  |     def circular_reference_deltas(self, DocClass1, DocClass2): | ||||||
|  |         class Person(DocClass1): | ||||||
|  |             name = StringField() | ||||||
|  |             owns = ListField(ReferenceField("Organization")) | ||||||
|  |  | ||||||
|  |         class Organization(DocClass2): | ||||||
|  |             name = StringField() | ||||||
|  |             owner = ReferenceField("Person") | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |         Organization.drop_collection() | ||||||
|  |  | ||||||
|  |         person = Person(name="owner").save() | ||||||
|  |         organization = Organization(name="company").save() | ||||||
|  |  | ||||||
|  |         person.owns.append(organization) | ||||||
|  |         organization.owner = person | ||||||
|  |  | ||||||
|  |         person.save() | ||||||
|  |         organization.save() | ||||||
|  |  | ||||||
|  |         p = Person.objects[0].select_related() | ||||||
|  |         o = Organization.objects.first() | ||||||
|  |         assert p.owns[0] == o | ||||||
|  |         assert o.owner == p | ||||||
|  |  | ||||||
|  |     def test_circular_reference_deltas_2(self): | ||||||
|  |         self.circular_reference_deltas_2(Document, Document) | ||||||
|  |         self.circular_reference_deltas_2(Document, DynamicDocument) | ||||||
|  |         self.circular_reference_deltas_2(DynamicDocument, Document) | ||||||
|  |         self.circular_reference_deltas_2(DynamicDocument, DynamicDocument) | ||||||
|  |  | ||||||
|  |     def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True): | ||||||
|  |         class Person(DocClass1): | ||||||
|  |             name = StringField() | ||||||
|  |             owns = ListField(ReferenceField("Organization", dbref=dbref)) | ||||||
|  |             employer = ReferenceField("Organization", dbref=dbref) | ||||||
|  |  | ||||||
|  |         class Organization(DocClass2): | ||||||
|  |             name = StringField() | ||||||
|  |             owner = ReferenceField("Person", dbref=dbref) | ||||||
|  |             employees = ListField(ReferenceField("Person", dbref=dbref)) | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |         Organization.drop_collection() | ||||||
|  |  | ||||||
|  |         person = Person(name="owner").save() | ||||||
|  |         employee = Person(name="employee").save() | ||||||
|  |         organization = Organization(name="company").save() | ||||||
|  |  | ||||||
|  |         person.owns.append(organization) | ||||||
|  |         organization.owner = person | ||||||
|  |  | ||||||
|  |         organization.employees.append(employee) | ||||||
|  |         employee.employer = organization | ||||||
|  |  | ||||||
|  |         person.save() | ||||||
|  |         organization.save() | ||||||
|  |         employee.save() | ||||||
|  |  | ||||||
|  |         p = Person.objects.get(name="owner") | ||||||
|  |         e = Person.objects.get(name="employee") | ||||||
|  |         o = Organization.objects.first() | ||||||
|  |  | ||||||
|  |         assert p.owns[0] == o | ||||||
|  |         assert o.owner == p | ||||||
|  |         assert e.employer == o | ||||||
|  |  | ||||||
|  |         return person, organization, employee | ||||||
|  |  | ||||||
|  |     def test_delta_db_field(self): | ||||||
|  |         self.delta_db_field(Document) | ||||||
|  |         self.delta_db_field(DynamicDocument) | ||||||
|  |  | ||||||
|  |     def delta_db_field(self, DocClass): | ||||||
|  |         class Doc(DocClass): | ||||||
|  |             string_field = StringField(db_field="db_string_field") | ||||||
|  |             int_field = IntField(db_field="db_int_field") | ||||||
|  |             dict_field = DictField(db_field="db_dict_field") | ||||||
|  |             list_field = ListField(db_field="db_list_field") | ||||||
|  |  | ||||||
|  |         Doc.drop_collection() | ||||||
|  |         doc = Doc() | ||||||
|  |         doc.save() | ||||||
|  |  | ||||||
|  |         doc = Doc.objects.first() | ||||||
|  |         assert doc._get_changed_fields() == [] | ||||||
|  |         assert doc._delta() == ({}, {}) | ||||||
|  |  | ||||||
|  |         doc.string_field = "hello" | ||||||
|  |         assert doc._get_changed_fields() == ["db_string_field"] | ||||||
|  |         assert doc._delta() == ({"db_string_field": "hello"}, {}) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         doc.int_field = 1 | ||||||
|  |         assert doc._get_changed_fields() == ["db_int_field"] | ||||||
|  |         assert doc._delta() == ({"db_int_field": 1}, {}) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         dict_value = {"hello": "world", "ping": "pong"} | ||||||
|  |         doc.dict_field = dict_value | ||||||
|  |         assert doc._get_changed_fields() == ["db_dict_field"] | ||||||
|  |         assert doc._delta() == ({"db_dict_field": dict_value}, {}) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         list_value = ["1", 2, {"hello": "world"}] | ||||||
|  |         doc.list_field = list_value | ||||||
|  |         assert doc._get_changed_fields() == ["db_list_field"] | ||||||
|  |         assert doc._delta() == ({"db_list_field": list_value}, {}) | ||||||
|  |  | ||||||
|  |         # Test unsetting | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         doc.dict_field = {} | ||||||
|  |         assert doc._get_changed_fields() == ["db_dict_field"] | ||||||
|  |         assert doc._delta() == ({}, {"db_dict_field": 1}) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         doc.list_field = [] | ||||||
|  |         assert doc._get_changed_fields() == ["db_list_field"] | ||||||
|  |         assert doc._delta() == ({}, {"db_list_field": 1}) | ||||||
|  |  | ||||||
|  |         # Test it saves that data | ||||||
|  |         doc = Doc() | ||||||
|  |         doc.save() | ||||||
|  |  | ||||||
|  |         doc.string_field = "hello" | ||||||
|  |         doc.int_field = 1 | ||||||
|  |         doc.dict_field = {"hello": "world"} | ||||||
|  |         doc.list_field = ["1", 2, {"hello": "world"}] | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |  | ||||||
|  |         assert doc.string_field == "hello" | ||||||
|  |         assert doc.int_field == 1 | ||||||
|  |         assert doc.dict_field == {"hello": "world"} | ||||||
|  |         assert doc.list_field == ["1", 2, {"hello": "world"}] | ||||||
|  |  | ||||||
|  |     def test_delta_recursive_db_field(self): | ||||||
|  |         self.delta_recursive_db_field(Document, EmbeddedDocument) | ||||||
|  |         self.delta_recursive_db_field(Document, DynamicEmbeddedDocument) | ||||||
|  |         self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument) | ||||||
|  |         self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument) | ||||||
|  |  | ||||||
|  |     def delta_recursive_db_field(self, DocClass, EmbeddedClass): | ||||||
|  |         class Embedded(EmbeddedClass): | ||||||
|  |             string_field = StringField(db_field="db_string_field") | ||||||
|  |             int_field = IntField(db_field="db_int_field") | ||||||
|  |             dict_field = DictField(db_field="db_dict_field") | ||||||
|  |             list_field = ListField(db_field="db_list_field") | ||||||
|  |  | ||||||
|  |         class Doc(DocClass): | ||||||
|  |             string_field = StringField(db_field="db_string_field") | ||||||
|  |             int_field = IntField(db_field="db_int_field") | ||||||
|  |             dict_field = DictField(db_field="db_dict_field") | ||||||
|  |             list_field = ListField(db_field="db_list_field") | ||||||
|  |             embedded_field = EmbeddedDocumentField( | ||||||
|  |                 Embedded, db_field="db_embedded_field" | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |         Doc.drop_collection() | ||||||
|  |         doc = Doc() | ||||||
|  |         doc.save() | ||||||
|  |  | ||||||
|  |         doc = Doc.objects.first() | ||||||
|  |         assert doc._get_changed_fields() == [] | ||||||
|  |         assert doc._delta() == ({}, {}) | ||||||
|  |  | ||||||
|  |         embedded_1 = Embedded() | ||||||
|  |         embedded_1.string_field = "hello" | ||||||
|  |         embedded_1.int_field = 1 | ||||||
|  |         embedded_1.dict_field = {"hello": "world"} | ||||||
|  |         embedded_1.list_field = ["1", 2, {"hello": "world"}] | ||||||
|  |         doc.embedded_field = embedded_1 | ||||||
|  |  | ||||||
|  |         assert doc._get_changed_fields() == ["db_embedded_field"] | ||||||
|  |  | ||||||
|  |         embedded_delta = { | ||||||
|  |             "db_string_field": "hello", | ||||||
|  |             "db_int_field": 1, | ||||||
|  |             "db_dict_field": {"hello": "world"}, | ||||||
|  |             "db_list_field": ["1", 2, {"hello": "world"}], | ||||||
|  |         } | ||||||
|  |         assert doc.embedded_field._delta() == (embedded_delta, {}) | ||||||
|  |         assert doc._delta() == ({"db_embedded_field": embedded_delta}, {}) | ||||||
|  |  | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |  | ||||||
|  |         doc.embedded_field.dict_field = {} | ||||||
|  |         assert doc._get_changed_fields() == ["db_embedded_field.db_dict_field"] | ||||||
|  |         assert doc.embedded_field._delta() == ({}, {"db_dict_field": 1}) | ||||||
|  |         assert doc._delta() == ({}, {"db_embedded_field.db_dict_field": 1}) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |         assert doc.embedded_field.dict_field == {} | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field = [] | ||||||
|  |         assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"] | ||||||
|  |         assert doc.embedded_field._delta() == ({}, {"db_list_field": 1}) | ||||||
|  |         assert doc._delta() == ({}, {"db_embedded_field.db_list_field": 1}) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |         assert doc.embedded_field.list_field == [] | ||||||
|  |  | ||||||
|  |         embedded_2 = Embedded() | ||||||
|  |         embedded_2.string_field = "hello" | ||||||
|  |         embedded_2.int_field = 1 | ||||||
|  |         embedded_2.dict_field = {"hello": "world"} | ||||||
|  |         embedded_2.list_field = ["1", 2, {"hello": "world"}] | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field = ["1", 2, embedded_2] | ||||||
|  |         assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"] | ||||||
|  |         assert doc.embedded_field._delta() == ( | ||||||
|  |             { | ||||||
|  |                 "db_list_field": [ | ||||||
|  |                     "1", | ||||||
|  |                     2, | ||||||
|  |                     { | ||||||
|  |                         "_cls": "Embedded", | ||||||
|  |                         "db_string_field": "hello", | ||||||
|  |                         "db_dict_field": {"hello": "world"}, | ||||||
|  |                         "db_int_field": 1, | ||||||
|  |                         "db_list_field": ["1", 2, {"hello": "world"}], | ||||||
|  |                     }, | ||||||
|  |                 ] | ||||||
|  |             }, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         assert doc._delta() == ( | ||||||
|  |             { | ||||||
|  |                 "db_embedded_field.db_list_field": [ | ||||||
|  |                     "1", | ||||||
|  |                     2, | ||||||
|  |                     { | ||||||
|  |                         "_cls": "Embedded", | ||||||
|  |                         "db_string_field": "hello", | ||||||
|  |                         "db_dict_field": {"hello": "world"}, | ||||||
|  |                         "db_int_field": 1, | ||||||
|  |                         "db_list_field": ["1", 2, {"hello": "world"}], | ||||||
|  |                     }, | ||||||
|  |                 ] | ||||||
|  |             }, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |  | ||||||
|  |         assert doc.embedded_field.list_field[0] == "1" | ||||||
|  |         assert doc.embedded_field.list_field[1] == 2 | ||||||
|  |         for k in doc.embedded_field.list_field[2]._fields: | ||||||
|  |             assert doc.embedded_field.list_field[2][k] == embedded_2[k] | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field[2].string_field = "world" | ||||||
|  |         assert doc._get_changed_fields() == [ | ||||||
|  |             "db_embedded_field.db_list_field.2.db_string_field" | ||||||
|  |         ] | ||||||
|  |         assert doc.embedded_field._delta() == ( | ||||||
|  |             {"db_list_field.2.db_string_field": "world"}, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         assert doc._delta() == ( | ||||||
|  |             {"db_embedded_field.db_list_field.2.db_string_field": "world"}, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |         assert doc.embedded_field.list_field[2].string_field == "world" | ||||||
|  |  | ||||||
|  |         # Test multiple assignments | ||||||
|  |         doc.embedded_field.list_field[2].string_field = "hello world" | ||||||
|  |         doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] | ||||||
|  |         assert doc._get_changed_fields() == ["db_embedded_field.db_list_field.2"] | ||||||
|  |         assert doc.embedded_field._delta() == ( | ||||||
|  |             { | ||||||
|  |                 "db_list_field.2": { | ||||||
|  |                     "_cls": "Embedded", | ||||||
|  |                     "db_string_field": "hello world", | ||||||
|  |                     "db_int_field": 1, | ||||||
|  |                     "db_list_field": ["1", 2, {"hello": "world"}], | ||||||
|  |                     "db_dict_field": {"hello": "world"}, | ||||||
|  |                 } | ||||||
|  |             }, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         assert doc._delta() == ( | ||||||
|  |             { | ||||||
|  |                 "db_embedded_field.db_list_field.2": { | ||||||
|  |                     "_cls": "Embedded", | ||||||
|  |                     "db_string_field": "hello world", | ||||||
|  |                     "db_int_field": 1, | ||||||
|  |                     "db_list_field": ["1", 2, {"hello": "world"}], | ||||||
|  |                     "db_dict_field": {"hello": "world"}, | ||||||
|  |                 } | ||||||
|  |             }, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |         assert doc.embedded_field.list_field[2].string_field == "hello world" | ||||||
|  |  | ||||||
|  |         # Test list native methods | ||||||
|  |         doc.embedded_field.list_field[2].list_field.pop(0) | ||||||
|  |         assert doc._delta() == ( | ||||||
|  |             { | ||||||
|  |                 "db_embedded_field.db_list_field.2.db_list_field": [ | ||||||
|  |                     2, | ||||||
|  |                     {"hello": "world"}, | ||||||
|  |                 ] | ||||||
|  |             }, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field[2].list_field.append(1) | ||||||
|  |         assert doc._delta() == ( | ||||||
|  |             { | ||||||
|  |                 "db_embedded_field.db_list_field.2.db_list_field": [ | ||||||
|  |                     2, | ||||||
|  |                     {"hello": "world"}, | ||||||
|  |                     1, | ||||||
|  |                 ] | ||||||
|  |             }, | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |         assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1] | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field[2].list_field.sort(key=str) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |         assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}] | ||||||
|  |  | ||||||
|  |         del doc.embedded_field.list_field[2].list_field[2]["hello"] | ||||||
|  |         assert doc._delta() == ( | ||||||
|  |             {}, | ||||||
|  |             {"db_embedded_field.db_list_field.2.db_list_field.2.hello": 1}, | ||||||
|  |         ) | ||||||
|  |         doc.save() | ||||||
|  |         doc = doc.reload(10) | ||||||
|  |  | ||||||
|  |         del doc.embedded_field.list_field[2].list_field | ||||||
|  |         assert doc._delta() == ( | ||||||
|  |             {}, | ||||||
|  |             {"db_embedded_field.db_list_field.2.db_list_field": 1}, | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def test_delta_for_dynamic_documents(self): | ||||||
|  |         class Person(DynamicDocument): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         p = Person(name="James", age=34) | ||||||
|  |         assert p._delta() == ( | ||||||
|  |             SON([("_cls", "Person"), ("name", "James"), ("age", 34)]), | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         p.doc = 123 | ||||||
|  |         del p.doc | ||||||
|  |         assert p._delta() == ( | ||||||
|  |             SON([("_cls", "Person"), ("name", "James"), ("age", 34)]), | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         p = Person() | ||||||
|  |         p.name = "Dean" | ||||||
|  |         p.age = 22 | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         p.age = 24 | ||||||
|  |         assert p.age == 24 | ||||||
|  |         assert p._get_changed_fields() == ["age"] | ||||||
|  |         assert p._delta() == ({"age": 24}, {}) | ||||||
|  |  | ||||||
|  |         p = Person.objects(age=22).get() | ||||||
|  |         p.age = 24 | ||||||
|  |         assert p.age == 24 | ||||||
|  |         assert p._get_changed_fields() == ["age"] | ||||||
|  |         assert p._delta() == ({"age": 24}, {}) | ||||||
|  |  | ||||||
|  |         p.save() | ||||||
|  |         assert 1 == Person.objects(age=24).count() | ||||||
|  |  | ||||||
|  |     def test_dynamic_delta(self): | ||||||
|  |         class Doc(DynamicDocument): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         Doc.drop_collection() | ||||||
|  |         doc = Doc() | ||||||
|  |         doc.save() | ||||||
|  |  | ||||||
|  |         doc = Doc.objects.first() | ||||||
|  |         assert doc._get_changed_fields() == [] | ||||||
|  |         assert doc._delta() == ({}, {}) | ||||||
|  |  | ||||||
|  |         doc.string_field = "hello" | ||||||
|  |         assert doc._get_changed_fields() == ["string_field"] | ||||||
|  |         assert doc._delta() == ({"string_field": "hello"}, {}) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         doc.int_field = 1 | ||||||
|  |         assert doc._get_changed_fields() == ["int_field"] | ||||||
|  |         assert doc._delta() == ({"int_field": 1}, {}) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         dict_value = {"hello": "world", "ping": "pong"} | ||||||
|  |         doc.dict_field = dict_value | ||||||
|  |         assert doc._get_changed_fields() == ["dict_field"] | ||||||
|  |         assert doc._delta() == ({"dict_field": dict_value}, {}) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         list_value = ["1", 2, {"hello": "world"}] | ||||||
|  |         doc.list_field = list_value | ||||||
|  |         assert doc._get_changed_fields() == ["list_field"] | ||||||
|  |         assert doc._delta() == ({"list_field": list_value}, {}) | ||||||
|  |  | ||||||
|  |         # Test unsetting | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         doc.dict_field = {} | ||||||
|  |         assert doc._get_changed_fields() == ["dict_field"] | ||||||
|  |         assert doc._delta() == ({}, {"dict_field": 1}) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         doc.list_field = [] | ||||||
|  |         assert doc._get_changed_fields() == ["list_field"] | ||||||
|  |         assert doc._delta() == ({}, {"list_field": 1}) | ||||||
|  |  | ||||||
|  |     def test_delta_with_dbref_true(self): | ||||||
|  |         person, organization, employee = self.circular_reference_deltas_2( | ||||||
|  |             Document, Document, True | ||||||
|  |         ) | ||||||
|  |         employee.name = "test" | ||||||
|  |  | ||||||
|  |         assert organization._get_changed_fields() == [] | ||||||
|  |  | ||||||
|  |         updates, removals = organization._delta() | ||||||
|  |         assert {} == removals | ||||||
|  |         assert {} == updates | ||||||
|  |  | ||||||
|  |         organization.employees.append(person) | ||||||
|  |         updates, removals = organization._delta() | ||||||
|  |         assert {} == removals | ||||||
|  |         assert "employees" in updates | ||||||
|  |  | ||||||
|  |     def test_delta_with_dbref_false(self): | ||||||
|  |         person, organization, employee = self.circular_reference_deltas_2( | ||||||
|  |             Document, Document, False | ||||||
|  |         ) | ||||||
|  |         employee.name = "test" | ||||||
|  |  | ||||||
|  |         assert organization._get_changed_fields() == [] | ||||||
|  |  | ||||||
|  |         updates, removals = organization._delta() | ||||||
|  |         assert {} == removals | ||||||
|  |         assert {} == updates | ||||||
|  |  | ||||||
|  |         organization.employees.append(person) | ||||||
|  |         updates, removals = organization._delta() | ||||||
|  |         assert {} == removals | ||||||
|  |         assert "employees" in updates | ||||||
|  |  | ||||||
|  |     def test_nested_nested_fields_mark_as_changed(self): | ||||||
|  |         class EmbeddedDoc(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class MyDoc(Document): | ||||||
|  |             subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc))) | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         MyDoc.drop_collection() | ||||||
|  |  | ||||||
|  |         mydoc = MyDoc( | ||||||
|  |             name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}} | ||||||
|  |         ).save() | ||||||
|  |  | ||||||
|  |         mydoc = MyDoc.objects.first() | ||||||
|  |         subdoc = mydoc.subs["a"]["b"] | ||||||
|  |         subdoc.name = "bar" | ||||||
|  |  | ||||||
|  |         assert ["name"] == subdoc._get_changed_fields() | ||||||
|  |         assert ["subs.a.b.name"] == mydoc._get_changed_fields() | ||||||
|  |  | ||||||
|  |         mydoc._clear_changed_fields() | ||||||
|  |         assert [] == mydoc._get_changed_fields() | ||||||
|  |  | ||||||
|  |     def test_lower_level_mark_as_changed(self): | ||||||
|  |         class EmbeddedDoc(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class MyDoc(Document): | ||||||
|  |             subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) | ||||||
|  |  | ||||||
|  |         MyDoc.drop_collection() | ||||||
|  |  | ||||||
|  |         MyDoc().save() | ||||||
|  |  | ||||||
|  |         mydoc = MyDoc.objects.first() | ||||||
|  |         mydoc.subs["a"] = EmbeddedDoc() | ||||||
|  |         assert ["subs.a"] == mydoc._get_changed_fields() | ||||||
|  |  | ||||||
|  |         subdoc = mydoc.subs["a"] | ||||||
|  |         subdoc.name = "bar" | ||||||
|  |  | ||||||
|  |         assert ["name"] == subdoc._get_changed_fields() | ||||||
|  |         assert ["subs.a"] == mydoc._get_changed_fields() | ||||||
|  |         mydoc.save() | ||||||
|  |  | ||||||
|  |         mydoc._clear_changed_fields() | ||||||
|  |         assert [] == mydoc._get_changed_fields() | ||||||
|  |  | ||||||
|  |     def test_upper_level_mark_as_changed(self): | ||||||
|  |         class EmbeddedDoc(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class MyDoc(Document): | ||||||
|  |             subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) | ||||||
|  |  | ||||||
|  |         MyDoc.drop_collection() | ||||||
|  |  | ||||||
|  |         MyDoc(subs={"a": EmbeddedDoc(name="foo")}).save() | ||||||
|  |  | ||||||
|  |         mydoc = MyDoc.objects.first() | ||||||
|  |         subdoc = mydoc.subs["a"] | ||||||
|  |         subdoc.name = "bar" | ||||||
|  |  | ||||||
|  |         assert ["name"] == subdoc._get_changed_fields() | ||||||
|  |         assert ["subs.a.name"] == mydoc._get_changed_fields() | ||||||
|  |  | ||||||
|  |         mydoc.subs["a"] = EmbeddedDoc() | ||||||
|  |         assert ["subs.a"] == mydoc._get_changed_fields() | ||||||
|  |         mydoc.save() | ||||||
|  |  | ||||||
|  |         mydoc._clear_changed_fields() | ||||||
|  |         assert [] == mydoc._get_changed_fields() | ||||||
|  |  | ||||||
|  |     def test_referenced_object_changed_attributes(self): | ||||||
|  |         """Ensures that when you save a new reference to a field, the referenced object isn't altered""" | ||||||
|  |  | ||||||
|  |         class Organization(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             org = ReferenceField("Organization", required=True) | ||||||
|  |  | ||||||
|  |         Organization.drop_collection() | ||||||
|  |         User.drop_collection() | ||||||
|  |  | ||||||
|  |         org1 = Organization(name="Org 1") | ||||||
|  |         org1.save() | ||||||
|  |  | ||||||
|  |         org2 = Organization(name="Org 2") | ||||||
|  |         org2.save() | ||||||
|  |  | ||||||
|  |         user = User(name="Fred", org=org1) | ||||||
|  |         user.save() | ||||||
|  |  | ||||||
|  |         org1.reload() | ||||||
|  |         org2.reload() | ||||||
|  |         user.reload() | ||||||
|  |         assert org1.name == "Org 1" | ||||||
|  |         assert org2.name == "Org 2" | ||||||
|  |         assert user.name == "Fred" | ||||||
|  |  | ||||||
|  |         user.name = "Harold" | ||||||
|  |         user.org = org2 | ||||||
|  |  | ||||||
|  |         org2.name = "New Org 2" | ||||||
|  |         assert org2.name == "New Org 2" | ||||||
|  |  | ||||||
|  |         user.save() | ||||||
|  |         org2.save() | ||||||
|  |  | ||||||
|  |         assert org2.name == "New Org 2" | ||||||
|  |         org2.reload() | ||||||
|  |         assert org2.name == "New Org 2" | ||||||
|  |  | ||||||
|  |     def test_delta_for_nested_map_fields(self): | ||||||
|  |         class UInfoDocument(Document): | ||||||
|  |             phone = StringField() | ||||||
|  |  | ||||||
|  |         class EmbeddedRole(EmbeddedDocument): | ||||||
|  |             type = StringField() | ||||||
|  |  | ||||||
|  |         class EmbeddedUser(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  |             roles = MapField(field=EmbeddedDocumentField(EmbeddedRole)) | ||||||
|  |             rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole)) | ||||||
|  |             info = ReferenceField(UInfoDocument) | ||||||
|  |  | ||||||
|  |         class Doc(Document): | ||||||
|  |             users = MapField(field=EmbeddedDocumentField(EmbeddedUser)) | ||||||
|  |             num = IntField(default=-1) | ||||||
|  |  | ||||||
|  |         Doc.drop_collection() | ||||||
|  |  | ||||||
|  |         doc = Doc(num=1) | ||||||
|  |         doc.users["007"] = EmbeddedUser(name="Agent007") | ||||||
|  |         doc.save() | ||||||
|  |  | ||||||
|  |         uinfo = UInfoDocument(phone="79089269066") | ||||||
|  |         uinfo.save() | ||||||
|  |  | ||||||
|  |         d = Doc.objects(num=1).first() | ||||||
|  |         d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin") | ||||||
|  |         d.users["007"]["rolist"].append(EmbeddedRole(type="oops")) | ||||||
|  |         d.users["007"]["info"] = uinfo | ||||||
|  |         delta = d._delta() | ||||||
|  |         assert True == ("users.007.roles.666" in delta[0]) | ||||||
|  |         assert True == ("users.007.rolist" in delta[0]) | ||||||
|  |         assert True == ("users.007.info" in delta[0]) | ||||||
|  |         assert "superadmin" == delta[0]["users.007.roles.666"]["type"] | ||||||
|  |         assert "oops" == delta[0]["users.007.rolist"][0]["type"] | ||||||
|  |         assert uinfo.id == delta[0]["users.007.info"] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | if __name__ == "__main__": | ||||||
|  |     unittest.main() | ||||||
| @@ -1,19 +1,20 @@ | |||||||
| import unittest | import unittest | ||||||
| 
 | 
 | ||||||
|  | import pytest | ||||||
|  | 
 | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from tests.utils import MongoDBTestCase | from tests.utils import MongoDBTestCase | ||||||
| 
 | 
 | ||||||
| __all__ = ("TestDynamicDocument", ) | __all__ = ("TestDynamicDocument",) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class TestDynamicDocument(MongoDBTestCase): | class TestDynamicDocument(MongoDBTestCase): | ||||||
| 
 |  | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
|         super(TestDynamicDocument, self).setUp() |         super(TestDynamicDocument, self).setUp() | ||||||
| 
 | 
 | ||||||
|         class Person(DynamicDocument): |         class Person(DynamicDocument): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             meta = {'allow_inheritance': True} |             meta = {"allow_inheritance": True} | ||||||
| 
 | 
 | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
| 
 | 
 | ||||||
| @@ -26,16 +27,15 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         p.name = "James" |         p.name = "James" | ||||||
|         p.age = 34 |         p.age = 34 | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James", |         assert p.to_mongo() == {"_cls": "Person", "name": "James", "age": 34} | ||||||
|                                         "age": 34}) |         assert p.to_mongo().keys() == ["_cls", "name", "age"] | ||||||
|         self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"]) |  | ||||||
|         p.save() |         p.save() | ||||||
|         self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"]) |         assert p.to_mongo().keys() == ["_id", "_cls", "name", "age"] | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(self.Person.objects.first().age, 34) |         assert self.Person.objects.first().age == 34 | ||||||
| 
 | 
 | ||||||
|         # Confirm no changes to self.Person |         # Confirm no changes to self.Person | ||||||
|         self.assertFalse(hasattr(self.Person, 'age')) |         assert not hasattr(self.Person, "age") | ||||||
| 
 | 
 | ||||||
|     def test_change_scope_of_variable(self): |     def test_change_scope_of_variable(self): | ||||||
|         """Test changing the scope of a dynamic field has no adverse effects""" |         """Test changing the scope of a dynamic field has no adverse effects""" | ||||||
| @@ -45,11 +45,11 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         p.save() |         p.save() | ||||||
| 
 | 
 | ||||||
|         p = self.Person.objects.get() |         p = self.Person.objects.get() | ||||||
|         p.misc = {'hello': 'world'} |         p.misc = {"hello": "world"} | ||||||
|         p.save() |         p.save() | ||||||
| 
 | 
 | ||||||
|         p = self.Person.objects.get() |         p = self.Person.objects.get() | ||||||
|         self.assertEqual(p.misc, {'hello': 'world'}) |         assert p.misc == {"hello": "world"} | ||||||
| 
 | 
 | ||||||
|     def test_delete_dynamic_field(self): |     def test_delete_dynamic_field(self): | ||||||
|         """Test deleting a dynamic field works""" |         """Test deleting a dynamic field works""" | ||||||
| @@ -60,23 +60,23 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         p.save() |         p.save() | ||||||
| 
 | 
 | ||||||
|         p = self.Person.objects.get() |         p = self.Person.objects.get() | ||||||
|         p.misc = {'hello': 'world'} |         p.misc = {"hello": "world"} | ||||||
|         p.save() |         p.save() | ||||||
| 
 | 
 | ||||||
|         p = self.Person.objects.get() |         p = self.Person.objects.get() | ||||||
|         self.assertEqual(p.misc, {'hello': 'world'}) |         assert p.misc == {"hello": "world"} | ||||||
|         collection = self.db[self.Person._get_collection_name()] |         collection = self.db[self.Person._get_collection_name()] | ||||||
|         obj = collection.find_one() |         obj = collection.find_one() | ||||||
|         self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name']) |         assert sorted(obj.keys()) == ["_cls", "_id", "misc", "name"] | ||||||
| 
 | 
 | ||||||
|         del p.misc |         del p.misc | ||||||
|         p.save() |         p.save() | ||||||
| 
 | 
 | ||||||
|         p = self.Person.objects.get() |         p = self.Person.objects.get() | ||||||
|         self.assertFalse(hasattr(p, 'misc')) |         assert not hasattr(p, "misc") | ||||||
| 
 | 
 | ||||||
|         obj = collection.find_one() |         obj = collection.find_one() | ||||||
|         self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name']) |         assert sorted(obj.keys()) == ["_cls", "_id", "name"] | ||||||
| 
 | 
 | ||||||
|     def test_reload_after_unsetting(self): |     def test_reload_after_unsetting(self): | ||||||
|         p = self.Person() |         p = self.Person() | ||||||
| @@ -90,78 +90,55 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         p = self.Person.objects.create() |         p = self.Person.objects.create() | ||||||
|         p.update(age=1) |         p.update(age=1) | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(len(p._data), 3) |         assert len(p._data) == 3 | ||||||
|         self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name']) |         assert sorted(p._data.keys()) == ["_cls", "id", "name"] | ||||||
| 
 | 
 | ||||||
|         p.reload() |         p.reload() | ||||||
|         self.assertEqual(len(p._data), 4) |         assert len(p._data) == 4 | ||||||
|         self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name']) |         assert sorted(p._data.keys()) == ["_cls", "age", "id", "name"] | ||||||
| 
 | 
 | ||||||
|     def test_fields_without_underscore(self): |     def test_fields_without_underscore(self): | ||||||
|         """Ensure we can query dynamic fields""" |         """Ensure we can query dynamic fields""" | ||||||
|         Person = self.Person |         Person = self.Person | ||||||
| 
 | 
 | ||||||
|         p = self.Person(name='Dean') |         p = self.Person(name="Dean") | ||||||
|         p.save() |         p.save() | ||||||
| 
 | 
 | ||||||
|         raw_p = Person.objects.as_pymongo().get(id=p.id) |         raw_p = Person.objects.as_pymongo().get(id=p.id) | ||||||
|         self.assertEqual( |         assert raw_p == {"_cls": u"Person", "_id": p.id, "name": u"Dean"} | ||||||
|             raw_p, |  | ||||||
|             { |  | ||||||
|                 '_cls': u'Person', |  | ||||||
|                 '_id': p.id, |  | ||||||
|                 'name': u'Dean' |  | ||||||
|              } |  | ||||||
|         ) |  | ||||||
| 
 | 
 | ||||||
|         p.name = 'OldDean' |         p.name = "OldDean" | ||||||
|         p.newattr = 'garbage' |         p.newattr = "garbage" | ||||||
|         p.save() |         p.save() | ||||||
|         raw_p = Person.objects.as_pymongo().get(id=p.id) |         raw_p = Person.objects.as_pymongo().get(id=p.id) | ||||||
|         self.assertEqual( |         assert raw_p == { | ||||||
|             raw_p, |             "_cls": u"Person", | ||||||
|             { |             "_id": p.id, | ||||||
|                 '_cls': u'Person', |             "name": "OldDean", | ||||||
|                 '_id': p.id, |             "newattr": u"garbage", | ||||||
|                 'name': 'OldDean', |         } | ||||||
|                 'newattr': u'garbage' |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
| 
 | 
 | ||||||
|     def test_fields_containing_underscore(self): |     def test_fields_containing_underscore(self): | ||||||
|         """Ensure we can query dynamic fields""" |         """Ensure we can query dynamic fields""" | ||||||
|  | 
 | ||||||
|         class WeirdPerson(DynamicDocument): |         class WeirdPerson(DynamicDocument): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             _name = StringField() |             _name = StringField() | ||||||
| 
 | 
 | ||||||
|         WeirdPerson.drop_collection() |         WeirdPerson.drop_collection() | ||||||
| 
 | 
 | ||||||
|         p = WeirdPerson(name='Dean', _name='Dean') |         p = WeirdPerson(name="Dean", _name="Dean") | ||||||
|         p.save() |         p.save() | ||||||
| 
 | 
 | ||||||
|         raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) |         raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) | ||||||
|         self.assertEqual( |         assert raw_p == {"_id": p.id, "_name": u"Dean", "name": u"Dean"} | ||||||
|             raw_p, |  | ||||||
|             { |  | ||||||
|                 '_id': p.id, |  | ||||||
|                 '_name': u'Dean', |  | ||||||
|                 'name': u'Dean' |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
| 
 | 
 | ||||||
|         p.name = 'OldDean' |         p.name = "OldDean" | ||||||
|         p._name = 'NewDean' |         p._name = "NewDean" | ||||||
|         p._newattr1 = 'garbage'    # Unknown fields won't be added |         p._newattr1 = "garbage"  # Unknown fields won't be added | ||||||
|         p.save() |         p.save() | ||||||
|         raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) |         raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) | ||||||
|         self.assertEqual( |         assert raw_p == {"_id": p.id, "_name": u"NewDean", "name": u"OldDean"} | ||||||
|             raw_p, |  | ||||||
|             { |  | ||||||
|                 '_id': p.id, |  | ||||||
|                 '_name': u'NewDean', |  | ||||||
|                 'name': u'OldDean', |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
| 
 | 
 | ||||||
|     def test_dynamic_document_queries(self): |     def test_dynamic_document_queries(self): | ||||||
|         """Ensure we can query dynamic fields""" |         """Ensure we can query dynamic fields""" | ||||||
| @@ -170,10 +147,10 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         p.age = 22 |         p.age = 22 | ||||||
|         p.save() |         p.save() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(1, self.Person.objects(age=22).count()) |         assert 1 == self.Person.objects(age=22).count() | ||||||
|         p = self.Person.objects(age=22) |         p = self.Person.objects(age=22) | ||||||
|         p = p.get() |         p = p.get() | ||||||
|         self.assertEqual(22, p.age) |         assert 22 == p.age | ||||||
| 
 | 
 | ||||||
|     def test_complex_dynamic_document_queries(self): |     def test_complex_dynamic_document_queries(self): | ||||||
|         class Person(DynamicDocument): |         class Person(DynamicDocument): | ||||||
| @@ -193,26 +170,25 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         p2.age = 10 |         p2.age = 10 | ||||||
|         p2.save() |         p2.save() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(Person.objects(age__icontains='ten').count(), 2) |         assert Person.objects(age__icontains="ten").count() == 2 | ||||||
|         self.assertEqual(Person.objects(age__gte=10).count(), 1) |         assert Person.objects(age__gte=10).count() == 1 | ||||||
| 
 | 
 | ||||||
|     def test_complex_data_lookups(self): |     def test_complex_data_lookups(self): | ||||||
|         """Ensure you can query dynamic document dynamic fields""" |         """Ensure you can query dynamic document dynamic fields""" | ||||||
|         p = self.Person() |         p = self.Person() | ||||||
|         p.misc = {'hello': 'world'} |         p.misc = {"hello": "world"} | ||||||
|         p.save() |         p.save() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(1, self.Person.objects(misc__hello='world').count()) |         assert 1 == self.Person.objects(misc__hello="world").count() | ||||||
| 
 | 
 | ||||||
|     def test_three_level_complex_data_lookups(self): |     def test_three_level_complex_data_lookups(self): | ||||||
|         """Ensure you can query three level document dynamic fields""" |         """Ensure you can query three level document dynamic fields""" | ||||||
|         p = self.Person.objects.create( |         self.Person.objects.create(misc={"hello": {"hello2": "world"}}) | ||||||
|             misc={'hello': {'hello2': 'world'}} |         assert 1 == self.Person.objects(misc__hello__hello2="world").count() | ||||||
|         ) |  | ||||||
|         self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count()) |  | ||||||
| 
 | 
 | ||||||
|     def test_complex_embedded_document_validation(self): |     def test_complex_embedded_document_validation(self): | ||||||
|         """Ensure embedded dynamic documents may be validated""" |         """Ensure embedded dynamic documents may be validated""" | ||||||
|  | 
 | ||||||
|         class Embedded(DynamicEmbeddedDocument): |         class Embedded(DynamicEmbeddedDocument): | ||||||
|             content = URLField() |             content = URLField() | ||||||
| 
 | 
 | ||||||
| @@ -222,27 +198,29 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         Doc.drop_collection() |         Doc.drop_collection() | ||||||
|         doc = Doc() |         doc = Doc() | ||||||
| 
 | 
 | ||||||
|         embedded_doc_1 = Embedded(content='http://mongoengine.org') |         embedded_doc_1 = Embedded(content="http://mongoengine.org") | ||||||
|         embedded_doc_1.validate() |         embedded_doc_1.validate() | ||||||
| 
 | 
 | ||||||
|         embedded_doc_2 = Embedded(content='this is not a url') |         embedded_doc_2 = Embedded(content="this is not a url") | ||||||
|         self.assertRaises(ValidationError, embedded_doc_2.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             embedded_doc_2.validate() | ||||||
| 
 | 
 | ||||||
|         doc.embedded_field_1 = embedded_doc_1 |         doc.embedded_field_1 = embedded_doc_1 | ||||||
|         doc.embedded_field_2 = embedded_doc_2 |         doc.embedded_field_2 = embedded_doc_2 | ||||||
|         self.assertRaises(ValidationError, doc.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             doc.validate() | ||||||
| 
 | 
 | ||||||
|     def test_inheritance(self): |     def test_inheritance(self): | ||||||
|         """Ensure that dynamic document plays nice with inheritance""" |         """Ensure that dynamic document plays nice with inheritance""" | ||||||
|  | 
 | ||||||
|         class Employee(self.Person): |         class Employee(self.Person): | ||||||
|             salary = IntField() |             salary = IntField() | ||||||
| 
 | 
 | ||||||
|         Employee.drop_collection() |         Employee.drop_collection() | ||||||
| 
 | 
 | ||||||
|         self.assertIn('name', Employee._fields) |         assert "name" in Employee._fields | ||||||
|         self.assertIn('salary', Employee._fields) |         assert "salary" in Employee._fields | ||||||
|         self.assertEqual(Employee._get_collection_name(), |         assert Employee._get_collection_name() == self.Person._get_collection_name() | ||||||
|                          self.Person._get_collection_name()) |  | ||||||
| 
 | 
 | ||||||
|         joe_bloggs = Employee() |         joe_bloggs = Employee() | ||||||
|         joe_bloggs.name = "Joe Bloggs" |         joe_bloggs.name = "Joe Bloggs" | ||||||
| @@ -250,14 +228,15 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         joe_bloggs.age = 20 |         joe_bloggs.age = 20 | ||||||
|         joe_bloggs.save() |         joe_bloggs.save() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(1, self.Person.objects(age=20).count()) |         assert 1 == self.Person.objects(age=20).count() | ||||||
|         self.assertEqual(1, Employee.objects(age=20).count()) |         assert 1 == Employee.objects(age=20).count() | ||||||
| 
 | 
 | ||||||
|         joe_bloggs = self.Person.objects.first() |         joe_bloggs = self.Person.objects.first() | ||||||
|         self.assertIsInstance(joe_bloggs, Employee) |         assert isinstance(joe_bloggs, Employee) | ||||||
| 
 | 
 | ||||||
|     def test_embedded_dynamic_document(self): |     def test_embedded_dynamic_document(self): | ||||||
|         """Test dynamic embedded documents""" |         """Test dynamic embedded documents""" | ||||||
|  | 
 | ||||||
|         class Embedded(DynamicEmbeddedDocument): |         class Embedded(DynamicEmbeddedDocument): | ||||||
|             pass |             pass | ||||||
| 
 | 
 | ||||||
| @@ -268,33 +247,33 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         doc = Doc() |         doc = Doc() | ||||||
| 
 | 
 | ||||||
|         embedded_1 = Embedded() |         embedded_1 = Embedded() | ||||||
|         embedded_1.string_field = 'hello' |         embedded_1.string_field = "hello" | ||||||
|         embedded_1.int_field = 1 |         embedded_1.int_field = 1 | ||||||
|         embedded_1.dict_field = {'hello': 'world'} |         embedded_1.dict_field = {"hello": "world"} | ||||||
|         embedded_1.list_field = ['1', 2, {'hello': 'world'}] |         embedded_1.list_field = ["1", 2, {"hello": "world"}] | ||||||
|         doc.embedded_field = embedded_1 |         doc.embedded_field = embedded_1 | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(doc.to_mongo(), { |         assert doc.to_mongo() == { | ||||||
|             "embedded_field": { |             "embedded_field": { | ||||||
|                 "_cls": "Embedded", |                 "_cls": "Embedded", | ||||||
|                 "string_field": "hello", |                 "string_field": "hello", | ||||||
|                 "int_field": 1, |                 "int_field": 1, | ||||||
|                 "dict_field": {"hello": "world"}, |                 "dict_field": {"hello": "world"}, | ||||||
|                 "list_field": ['1', 2, {'hello': 'world'}] |                 "list_field": ["1", 2, {"hello": "world"}], | ||||||
|             } |             } | ||||||
|         }) |         } | ||||||
|         doc.save() |         doc.save() | ||||||
| 
 | 
 | ||||||
|         doc = Doc.objects.first() |         doc = Doc.objects.first() | ||||||
|         self.assertEqual(doc.embedded_field.__class__, Embedded) |         assert doc.embedded_field.__class__ == Embedded | ||||||
|         self.assertEqual(doc.embedded_field.string_field, "hello") |         assert doc.embedded_field.string_field == "hello" | ||||||
|         self.assertEqual(doc.embedded_field.int_field, 1) |         assert doc.embedded_field.int_field == 1 | ||||||
|         self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) |         assert doc.embedded_field.dict_field == {"hello": "world"} | ||||||
|         self.assertEqual(doc.embedded_field.list_field, |         assert doc.embedded_field.list_field == ["1", 2, {"hello": "world"}] | ||||||
|                             ['1', 2, {'hello': 'world'}]) |  | ||||||
| 
 | 
 | ||||||
|     def test_complex_embedded_documents(self): |     def test_complex_embedded_documents(self): | ||||||
|         """Test complex dynamic embedded documents setups""" |         """Test complex dynamic embedded documents setups""" | ||||||
|  | 
 | ||||||
|         class Embedded(DynamicEmbeddedDocument): |         class Embedded(DynamicEmbeddedDocument): | ||||||
|             pass |             pass | ||||||
| 
 | 
 | ||||||
| @@ -305,51 +284,54 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         doc = Doc() |         doc = Doc() | ||||||
| 
 | 
 | ||||||
|         embedded_1 = Embedded() |         embedded_1 = Embedded() | ||||||
|         embedded_1.string_field = 'hello' |         embedded_1.string_field = "hello" | ||||||
|         embedded_1.int_field = 1 |         embedded_1.int_field = 1 | ||||||
|         embedded_1.dict_field = {'hello': 'world'} |         embedded_1.dict_field = {"hello": "world"} | ||||||
| 
 | 
 | ||||||
|         embedded_2 = Embedded() |         embedded_2 = Embedded() | ||||||
|         embedded_2.string_field = 'hello' |         embedded_2.string_field = "hello" | ||||||
|         embedded_2.int_field = 1 |         embedded_2.int_field = 1 | ||||||
|         embedded_2.dict_field = {'hello': 'world'} |         embedded_2.dict_field = {"hello": "world"} | ||||||
|         embedded_2.list_field = ['1', 2, {'hello': 'world'}] |         embedded_2.list_field = ["1", 2, {"hello": "world"}] | ||||||
| 
 | 
 | ||||||
|         embedded_1.list_field = ['1', 2, embedded_2] |         embedded_1.list_field = ["1", 2, embedded_2] | ||||||
|         doc.embedded_field = embedded_1 |         doc.embedded_field = embedded_1 | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(doc.to_mongo(), { |         assert doc.to_mongo() == { | ||||||
|             "embedded_field": { |             "embedded_field": { | ||||||
|                 "_cls": "Embedded", |                 "_cls": "Embedded", | ||||||
|                 "string_field": "hello", |                 "string_field": "hello", | ||||||
|                 "int_field": 1, |                 "int_field": 1, | ||||||
|                 "dict_field": {"hello": "world"}, |                 "dict_field": {"hello": "world"}, | ||||||
|                 "list_field": ['1', 2, |                 "list_field": [ | ||||||
|                     {"_cls": "Embedded", |                     "1", | ||||||
|                     "string_field": "hello", |                     2, | ||||||
|                     "int_field": 1, |                     { | ||||||
|                     "dict_field": {"hello": "world"}, |                         "_cls": "Embedded", | ||||||
|                     "list_field": ['1', 2, {'hello': 'world'}]} |                         "string_field": "hello", | ||||||
|                 ] |                         "int_field": 1, | ||||||
|  |                         "dict_field": {"hello": "world"}, | ||||||
|  |                         "list_field": ["1", 2, {"hello": "world"}], | ||||||
|  |                     }, | ||||||
|  |                 ], | ||||||
|             } |             } | ||||||
|         }) |         } | ||||||
|         doc.save() |         doc.save() | ||||||
|         doc = Doc.objects.first() |         doc = Doc.objects.first() | ||||||
|         self.assertEqual(doc.embedded_field.__class__, Embedded) |         assert doc.embedded_field.__class__ == Embedded | ||||||
|         self.assertEqual(doc.embedded_field.string_field, "hello") |         assert doc.embedded_field.string_field == "hello" | ||||||
|         self.assertEqual(doc.embedded_field.int_field, 1) |         assert doc.embedded_field.int_field == 1 | ||||||
|         self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) |         assert doc.embedded_field.dict_field == {"hello": "world"} | ||||||
|         self.assertEqual(doc.embedded_field.list_field[0], '1') |         assert doc.embedded_field.list_field[0] == "1" | ||||||
|         self.assertEqual(doc.embedded_field.list_field[1], 2) |         assert doc.embedded_field.list_field[1] == 2 | ||||||
| 
 | 
 | ||||||
|         embedded_field = doc.embedded_field.list_field[2] |         embedded_field = doc.embedded_field.list_field[2] | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(embedded_field.__class__, Embedded) |         assert embedded_field.__class__ == Embedded | ||||||
|         self.assertEqual(embedded_field.string_field, "hello") |         assert embedded_field.string_field == "hello" | ||||||
|         self.assertEqual(embedded_field.int_field, 1) |         assert embedded_field.int_field == 1 | ||||||
|         self.assertEqual(embedded_field.dict_field, {'hello': 'world'}) |         assert embedded_field.dict_field == {"hello": "world"} | ||||||
|         self.assertEqual(embedded_field.list_field, ['1', 2, |         assert embedded_field.list_field == ["1", 2, {"hello": "world"}] | ||||||
|                                                         {'hello': 'world'}]) |  | ||||||
| 
 | 
 | ||||||
|     def test_dynamic_and_embedded(self): |     def test_dynamic_and_embedded(self): | ||||||
|         """Ensure embedded documents play nicely""" |         """Ensure embedded documents play nicely""" | ||||||
| @@ -368,18 +350,18 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         person.address.city = "Lundenne" |         person.address.city = "Lundenne" | ||||||
|         person.save() |         person.save() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(Person.objects.first().address.city, "Lundenne") |         assert Person.objects.first().address.city == "Lundenne" | ||||||
| 
 | 
 | ||||||
|         person = Person.objects.first() |         person = Person.objects.first() | ||||||
|         person.address = Address(city="Londinium") |         person.address = Address(city="Londinium") | ||||||
|         person.save() |         person.save() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(Person.objects.first().address.city, "Londinium") |         assert Person.objects.first().address.city == "Londinium" | ||||||
| 
 | 
 | ||||||
|         person = Person.objects.first() |         person = Person.objects.first() | ||||||
|         person.age = 35 |         person.age = 35 | ||||||
|         person.save() |         person.save() | ||||||
|         self.assertEqual(Person.objects.first().age, 35) |         assert Person.objects.first().age == 35 | ||||||
| 
 | 
 | ||||||
|     def test_dynamic_embedded_works_with_only(self): |     def test_dynamic_embedded_works_with_only(self): | ||||||
|         """Ensure custom fieldnames on a dynamic embedded document are found by qs.only()""" |         """Ensure custom fieldnames on a dynamic embedded document are found by qs.only()""" | ||||||
| @@ -392,10 +374,15 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
| 
 | 
 | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
| 
 | 
 | ||||||
|         Person(name="Eric", address=Address(city="San Francisco", street_number="1337")).save() |         Person( | ||||||
|  |             name="Eric", address=Address(city="San Francisco", street_number="1337") | ||||||
|  |         ).save() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(Person.objects.first().address.street_number, '1337') |         assert Person.objects.first().address.street_number == "1337" | ||||||
|         self.assertEqual(Person.objects.only('address__street_number').first().address.street_number, '1337') |         assert ( | ||||||
|  |             Person.objects.only("address__street_number").first().address.street_number | ||||||
|  |             == "1337" | ||||||
|  |         ) | ||||||
| 
 | 
 | ||||||
|     def test_dynamic_and_embedded_dict_access(self): |     def test_dynamic_and_embedded_dict_access(self): | ||||||
|         """Ensure embedded dynamic documents work with dict[] style access""" |         """Ensure embedded dynamic documents work with dict[] style access""" | ||||||
| @@ -419,21 +406,21 @@ class TestDynamicDocument(MongoDBTestCase): | |||||||
|         person["address"]["city"] = "Lundenne" |         person["address"]["city"] = "Lundenne" | ||||||
|         person.save() |         person.save() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(Person.objects.first().address.city, "Lundenne") |         assert Person.objects.first().address.city == "Lundenne" | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(Person.objects.first().phone, "555-1212") |         assert Person.objects.first().phone == "555-1212" | ||||||
| 
 | 
 | ||||||
|         person = Person.objects.first() |         person = Person.objects.first() | ||||||
|         person.address = Address(city="Londinium") |         person.address = Address(city="Londinium") | ||||||
|         person.save() |         person.save() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(Person.objects.first().address.city, "Londinium") |         assert Person.objects.first().address.city == "Londinium" | ||||||
| 
 | 
 | ||||||
|         person = Person.objects.first() |         person = Person.objects.first() | ||||||
|         person["age"] = 35 |         person["age"] = 35 | ||||||
|         person.save() |         person.save() | ||||||
|         self.assertEqual(Person.objects.first().age, 35) |         assert Person.objects.first().age == 35 | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| if __name__ == '__main__': | if __name__ == "__main__": | ||||||
|     unittest.main() |     unittest.main() | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										631
									
								
								tests/document/test_inheritance.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										631
									
								
								tests/document/test_inheritance.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,631 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | import unittest | ||||||
|  | import warnings | ||||||
|  |  | ||||||
|  | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import ( | ||||||
|  |     BooleanField, | ||||||
|  |     Document, | ||||||
|  |     EmbeddedDocument, | ||||||
|  |     EmbeddedDocumentField, | ||||||
|  |     GenericReferenceField, | ||||||
|  |     IntField, | ||||||
|  |     ReferenceField, | ||||||
|  |     StringField, | ||||||
|  | ) | ||||||
|  | from mongoengine.pymongo_support import list_collection_names | ||||||
|  | from tests.fixtures import Base | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestInheritance(MongoDBTestCase): | ||||||
|  |     def tearDown(self): | ||||||
|  |         for collection in list_collection_names(self.db): | ||||||
|  |             self.db.drop_collection(collection) | ||||||
|  |  | ||||||
|  |     def test_constructor_cls(self): | ||||||
|  |         # Ensures _cls is properly set during construction | ||||||
|  |         # and when object gets reloaded (prevent regression of #1950) | ||||||
|  |         class EmbedData(EmbeddedDocument): | ||||||
|  |             data = StringField() | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         class DataDoc(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             embed = EmbeddedDocumentField(EmbedData) | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         test_doc = DataDoc(name="test", embed=EmbedData(data="data")) | ||||||
|  |         assert test_doc._cls == "DataDoc" | ||||||
|  |         assert test_doc.embed._cls == "EmbedData" | ||||||
|  |         test_doc.save() | ||||||
|  |         saved_doc = DataDoc.objects.with_id(test_doc.id) | ||||||
|  |         assert test_doc._cls == saved_doc._cls | ||||||
|  |         assert test_doc.embed._cls == saved_doc.embed._cls | ||||||
|  |         test_doc.delete() | ||||||
|  |  | ||||||
|  |     def test_superclasses(self): | ||||||
|  |         """Ensure that the correct list of superclasses is assembled. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class Animal(Document): | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         class Fish(Animal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Guppy(Fish): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Mammal(Animal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Dog(Mammal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Human(Mammal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         assert Animal._superclasses == () | ||||||
|  |         assert Fish._superclasses == ("Animal",) | ||||||
|  |         assert Guppy._superclasses == ("Animal", "Animal.Fish") | ||||||
|  |         assert Mammal._superclasses == ("Animal",) | ||||||
|  |         assert Dog._superclasses == ("Animal", "Animal.Mammal") | ||||||
|  |         assert Human._superclasses == ("Animal", "Animal.Mammal") | ||||||
|  |  | ||||||
|  |     def test_external_superclasses(self): | ||||||
|  |         """Ensure that the correct list of super classes is assembled when | ||||||
|  |         importing part of the model. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class Animal(Base): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Fish(Animal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Guppy(Fish): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Mammal(Animal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Dog(Mammal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Human(Mammal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         assert Animal._superclasses == ("Base",) | ||||||
|  |         assert Fish._superclasses == ("Base", "Base.Animal") | ||||||
|  |         assert Guppy._superclasses == ("Base", "Base.Animal", "Base.Animal.Fish") | ||||||
|  |         assert Mammal._superclasses == ("Base", "Base.Animal") | ||||||
|  |         assert Dog._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal") | ||||||
|  |         assert Human._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal") | ||||||
|  |  | ||||||
|  |     def test_subclasses(self): | ||||||
|  |         """Ensure that the correct list of _subclasses (subclasses) is | ||||||
|  |         assembled. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class Animal(Document): | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         class Fish(Animal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Guppy(Fish): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Mammal(Animal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Dog(Mammal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Human(Mammal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         assert Animal._subclasses == ( | ||||||
|  |             "Animal", | ||||||
|  |             "Animal.Fish", | ||||||
|  |             "Animal.Fish.Guppy", | ||||||
|  |             "Animal.Mammal", | ||||||
|  |             "Animal.Mammal.Dog", | ||||||
|  |             "Animal.Mammal.Human", | ||||||
|  |         ) | ||||||
|  |         assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Guppy") | ||||||
|  |         assert Guppy._subclasses == ("Animal.Fish.Guppy",) | ||||||
|  |         assert Mammal._subclasses == ( | ||||||
|  |             "Animal.Mammal", | ||||||
|  |             "Animal.Mammal.Dog", | ||||||
|  |             "Animal.Mammal.Human", | ||||||
|  |         ) | ||||||
|  |         assert Human._subclasses == ("Animal.Mammal.Human",) | ||||||
|  |  | ||||||
|  |     def test_external_subclasses(self): | ||||||
|  |         """Ensure that the correct list of _subclasses (subclasses) is | ||||||
|  |         assembled when importing part of the model. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class Animal(Base): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Fish(Animal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Guppy(Fish): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Mammal(Animal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Dog(Mammal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Human(Mammal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         assert Animal._subclasses == ( | ||||||
|  |             "Base.Animal", | ||||||
|  |             "Base.Animal.Fish", | ||||||
|  |             "Base.Animal.Fish.Guppy", | ||||||
|  |             "Base.Animal.Mammal", | ||||||
|  |             "Base.Animal.Mammal.Dog", | ||||||
|  |             "Base.Animal.Mammal.Human", | ||||||
|  |         ) | ||||||
|  |         assert Fish._subclasses == ("Base.Animal.Fish", "Base.Animal.Fish.Guppy") | ||||||
|  |         assert Guppy._subclasses == ("Base.Animal.Fish.Guppy",) | ||||||
|  |         assert Mammal._subclasses == ( | ||||||
|  |             "Base.Animal.Mammal", | ||||||
|  |             "Base.Animal.Mammal.Dog", | ||||||
|  |             "Base.Animal.Mammal.Human", | ||||||
|  |         ) | ||||||
|  |         assert Human._subclasses == ("Base.Animal.Mammal.Human",) | ||||||
|  |  | ||||||
|  |     def test_dynamic_declarations(self): | ||||||
|  |         """Test that declaring an extra class updates meta data""" | ||||||
|  |  | ||||||
|  |         class Animal(Document): | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         assert Animal._superclasses == () | ||||||
|  |         assert Animal._subclasses == ("Animal",) | ||||||
|  |  | ||||||
|  |         # Test dynamically adding a class changes the meta data | ||||||
|  |         class Fish(Animal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         assert Animal._superclasses == () | ||||||
|  |         assert Animal._subclasses == ("Animal", "Animal.Fish") | ||||||
|  |  | ||||||
|  |         assert Fish._superclasses == ("Animal",) | ||||||
|  |         assert Fish._subclasses == ("Animal.Fish",) | ||||||
|  |  | ||||||
|  |         # Test dynamically adding an inherited class changes the meta data | ||||||
|  |         class Pike(Fish): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         assert Animal._superclasses == () | ||||||
|  |         assert Animal._subclasses == ("Animal", "Animal.Fish", "Animal.Fish.Pike") | ||||||
|  |  | ||||||
|  |         assert Fish._superclasses == ("Animal",) | ||||||
|  |         assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Pike") | ||||||
|  |  | ||||||
|  |         assert Pike._superclasses == ("Animal", "Animal.Fish") | ||||||
|  |         assert Pike._subclasses == ("Animal.Fish.Pike",) | ||||||
|  |  | ||||||
|  |     def test_inheritance_meta_data(self): | ||||||
|  |         """Ensure that document may inherit fields from a superclass document. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             age = IntField() | ||||||
|  |  | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         class Employee(Person): | ||||||
|  |             salary = IntField() | ||||||
|  |  | ||||||
|  |         assert ["_cls", "age", "id", "name", "salary"] == sorted( | ||||||
|  |             Employee._fields.keys() | ||||||
|  |         ) | ||||||
|  |         assert Employee._get_collection_name() == Person._get_collection_name() | ||||||
|  |  | ||||||
|  |     def test_inheritance_to_mongo_keys(self): | ||||||
|  |         """Ensure that document may inherit fields from a superclass document. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             age = IntField() | ||||||
|  |  | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         class Employee(Person): | ||||||
|  |             salary = IntField() | ||||||
|  |  | ||||||
|  |         assert ["_cls", "age", "id", "name", "salary"] == sorted( | ||||||
|  |             Employee._fields.keys() | ||||||
|  |         ) | ||||||
|  |         assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"] | ||||||
|  |         assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [ | ||||||
|  |             "_cls", | ||||||
|  |             "name", | ||||||
|  |             "age", | ||||||
|  |             "salary", | ||||||
|  |         ] | ||||||
|  |         assert Employee._get_collection_name() == Person._get_collection_name() | ||||||
|  |  | ||||||
|  |     def test_indexes_and_multiple_inheritance(self): | ||||||
|  |         """ Ensure that all of the indexes are created for a document with | ||||||
|  |         multiple inheritance. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class A(Document): | ||||||
|  |             a = StringField() | ||||||
|  |  | ||||||
|  |             meta = {"allow_inheritance": True, "indexes": ["a"]} | ||||||
|  |  | ||||||
|  |         class B(Document): | ||||||
|  |             b = StringField() | ||||||
|  |  | ||||||
|  |             meta = {"allow_inheritance": True, "indexes": ["b"]} | ||||||
|  |  | ||||||
|  |         class C(A, B): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         A.drop_collection() | ||||||
|  |         B.drop_collection() | ||||||
|  |         C.drop_collection() | ||||||
|  |  | ||||||
|  |         C.ensure_indexes() | ||||||
|  |  | ||||||
|  |         assert sorted( | ||||||
|  |             [idx["key"] for idx in C._get_collection().index_information().values()] | ||||||
|  |         ) == sorted( | ||||||
|  |             [[(u"_cls", 1), (u"b", 1)], [(u"_id", 1)], [(u"_cls", 1), (u"a", 1)]] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def test_polymorphic_queries(self): | ||||||
|  |         """Ensure that the correct subclasses are returned from a query | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class Animal(Document): | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         class Fish(Animal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Mammal(Animal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Dog(Mammal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Human(Mammal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         Animal.drop_collection() | ||||||
|  |  | ||||||
|  |         Animal().save() | ||||||
|  |         Fish().save() | ||||||
|  |         Mammal().save() | ||||||
|  |         Dog().save() | ||||||
|  |         Human().save() | ||||||
|  |  | ||||||
|  |         classes = [obj.__class__ for obj in Animal.objects] | ||||||
|  |         assert classes == [Animal, Fish, Mammal, Dog, Human] | ||||||
|  |  | ||||||
|  |         classes = [obj.__class__ for obj in Mammal.objects] | ||||||
|  |         assert classes == [Mammal, Dog, Human] | ||||||
|  |  | ||||||
|  |         classes = [obj.__class__ for obj in Human.objects] | ||||||
|  |         assert classes == [Human] | ||||||
|  |  | ||||||
|  |     def test_allow_inheritance(self): | ||||||
|  |         """Ensure that inheritance is disabled by default on simple | ||||||
|  |         classes and that _cls will not be used. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class Animal(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         # can't inherit because Animal didn't explicitly allow inheritance | ||||||
|  |         with pytest.raises(ValueError, match="Document Animal may not be subclassed"): | ||||||
|  |  | ||||||
|  |             class Dog(Animal): | ||||||
|  |                 pass | ||||||
|  |  | ||||||
|  |         # Check that _cls etc aren't present on simple documents | ||||||
|  |         dog = Animal(name="dog").save() | ||||||
|  |         assert dog.to_mongo().keys() == ["_id", "name"] | ||||||
|  |  | ||||||
|  |         collection = self.db[Animal._get_collection_name()] | ||||||
|  |         obj = collection.find_one() | ||||||
|  |         assert "_cls" not in obj | ||||||
|  |  | ||||||
|  |     def test_cant_turn_off_inheritance_on_subclass(self): | ||||||
|  |         """Ensure if inheritance is on in a subclass you cant turn it off. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class Animal(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         with pytest.raises(ValueError) as exc_info: | ||||||
|  |  | ||||||
|  |             class Mammal(Animal): | ||||||
|  |                 meta = {"allow_inheritance": False} | ||||||
|  |  | ||||||
|  |         assert ( | ||||||
|  |             str(exc_info.value) | ||||||
|  |             == 'Only direct subclasses of Document may set "allow_inheritance" to False' | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def test_allow_inheritance_abstract_document(self): | ||||||
|  |         """Ensure that abstract documents can set inheritance rules and that | ||||||
|  |         _cls will not be used. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class FinalDocument(Document): | ||||||
|  |             meta = {"abstract": True, "allow_inheritance": False} | ||||||
|  |  | ||||||
|  |         class Animal(FinalDocument): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         with pytest.raises(ValueError): | ||||||
|  |  | ||||||
|  |             class Mammal(Animal): | ||||||
|  |                 pass | ||||||
|  |  | ||||||
|  |         # Check that _cls isn't present in simple documents | ||||||
|  |         doc = Animal(name="dog") | ||||||
|  |         assert "_cls" not in doc.to_mongo() | ||||||
|  |  | ||||||
|  |     def test_using_abstract_class_in_reference_field(self): | ||||||
|  |         # Ensures no regression of #1920 | ||||||
|  |         class AbstractHuman(Document): | ||||||
|  |             meta = {"abstract": True} | ||||||
|  |  | ||||||
|  |         class Dad(AbstractHuman): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Home(Document): | ||||||
|  |             dad = ReferenceField(AbstractHuman)  # Referencing the abstract class | ||||||
|  |             address = StringField() | ||||||
|  |  | ||||||
|  |         dad = Dad(name="5").save() | ||||||
|  |         Home(dad=dad, address="street").save() | ||||||
|  |  | ||||||
|  |         home = Home.objects.first() | ||||||
|  |         home.address = "garbage" | ||||||
|  |         home.save()  # Was failing with ValidationError | ||||||
|  |  | ||||||
|  |     def test_abstract_class_referencing_self(self): | ||||||
|  |         # Ensures no regression of #1920 | ||||||
|  |         class Human(Document): | ||||||
|  |             meta = {"abstract": True} | ||||||
|  |             creator = ReferenceField("self", dbref=True) | ||||||
|  |  | ||||||
|  |         class User(Human): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         user = User(name="John").save() | ||||||
|  |         user2 = User(name="Foo", creator=user).save() | ||||||
|  |  | ||||||
|  |         user2 = User.objects.with_id(user2.id) | ||||||
|  |         user2.name = "Bar" | ||||||
|  |         user2.save()  # Was failing with ValidationError | ||||||
|  |  | ||||||
|  |     def test_abstract_handle_ids_in_metaclass_properly(self): | ||||||
|  |         class City(Document): | ||||||
|  |             continent = StringField() | ||||||
|  |             meta = {"abstract": True, "allow_inheritance": False} | ||||||
|  |  | ||||||
|  |         class EuropeanCity(City): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         berlin = EuropeanCity(name="Berlin", continent="Europe") | ||||||
|  |         assert len(berlin._db_field_map) == len(berlin._fields_ordered) | ||||||
|  |         assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered) | ||||||
|  |         assert len(berlin._fields_ordered) == 3 | ||||||
|  |         assert berlin._fields_ordered[0] == "id" | ||||||
|  |  | ||||||
|  |     def test_auto_id_not_set_if_specific_in_parent_class(self): | ||||||
|  |         class City(Document): | ||||||
|  |             continent = StringField() | ||||||
|  |             city_id = IntField(primary_key=True) | ||||||
|  |             meta = {"abstract": True, "allow_inheritance": False} | ||||||
|  |  | ||||||
|  |         class EuropeanCity(City): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         berlin = EuropeanCity(name="Berlin", continent="Europe") | ||||||
|  |         assert len(berlin._db_field_map) == len(berlin._fields_ordered) | ||||||
|  |         assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered) | ||||||
|  |         assert len(berlin._fields_ordered) == 3 | ||||||
|  |         assert berlin._fields_ordered[0] == "city_id" | ||||||
|  |  | ||||||
|  |     def test_auto_id_vs_non_pk_id_field(self): | ||||||
|  |         class City(Document): | ||||||
|  |             continent = StringField() | ||||||
|  |             id = IntField() | ||||||
|  |             meta = {"abstract": True, "allow_inheritance": False} | ||||||
|  |  | ||||||
|  |         class EuropeanCity(City): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         berlin = EuropeanCity(name="Berlin", continent="Europe") | ||||||
|  |         assert len(berlin._db_field_map) == len(berlin._fields_ordered) | ||||||
|  |         assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered) | ||||||
|  |         assert len(berlin._fields_ordered) == 4 | ||||||
|  |         assert berlin._fields_ordered[0] == "auto_id_0" | ||||||
|  |         berlin.save() | ||||||
|  |         assert berlin.pk == berlin.auto_id_0 | ||||||
|  |  | ||||||
|  |     def test_abstract_document_creation_does_not_fail(self): | ||||||
|  |         class City(Document): | ||||||
|  |             continent = StringField() | ||||||
|  |             meta = {"abstract": True, "allow_inheritance": False} | ||||||
|  |  | ||||||
|  |         city = City(continent="asia") | ||||||
|  |         assert city.pk is None | ||||||
|  |         # TODO: expected error? Shouldn't we create a new error type? | ||||||
|  |         with pytest.raises(KeyError): | ||||||
|  |             setattr(city, "pk", 1) | ||||||
|  |  | ||||||
|  |     def test_allow_inheritance_embedded_document(self): | ||||||
|  |         """Ensure embedded documents respect inheritance.""" | ||||||
|  |  | ||||||
|  |         class Comment(EmbeddedDocument): | ||||||
|  |             content = StringField() | ||||||
|  |  | ||||||
|  |         with pytest.raises(ValueError): | ||||||
|  |  | ||||||
|  |             class SpecialComment(Comment): | ||||||
|  |                 pass | ||||||
|  |  | ||||||
|  |         doc = Comment(content="test") | ||||||
|  |         assert "_cls" not in doc.to_mongo() | ||||||
|  |  | ||||||
|  |         class Comment(EmbeddedDocument): | ||||||
|  |             content = StringField() | ||||||
|  |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|  |         doc = Comment(content="test") | ||||||
|  |         assert "_cls" in doc.to_mongo() | ||||||
|  |  | ||||||
|  |     def test_document_inheritance(self): | ||||||
|  |         """Ensure mutliple inheritance of abstract documents | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class DateCreatedDocument(Document): | ||||||
|  |             meta = {"allow_inheritance": True, "abstract": True} | ||||||
|  |  | ||||||
|  |         class DateUpdatedDocument(Document): | ||||||
|  |             meta = {"allow_inheritance": True, "abstract": True} | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |  | ||||||
|  |             class MyDocument(DateCreatedDocument, DateUpdatedDocument): | ||||||
|  |                 pass | ||||||
|  |  | ||||||
|  |         except Exception: | ||||||
|  |             assert False, "Couldn't create MyDocument class" | ||||||
|  |  | ||||||
|  |     def test_abstract_documents(self): | ||||||
|  |         """Ensure that a document superclass can be marked as abstract | ||||||
|  |         thereby not using it as the name for the collection.""" | ||||||
|  |  | ||||||
|  |         defaults = { | ||||||
|  |             "index_background": True, | ||||||
|  |             "index_opts": {"hello": "world"}, | ||||||
|  |             "allow_inheritance": True, | ||||||
|  |             "queryset_class": "QuerySet", | ||||||
|  |             "db_alias": "myDB", | ||||||
|  |             "shard_key": ("hello", "world"), | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |         meta_settings = {"abstract": True} | ||||||
|  |         meta_settings.update(defaults) | ||||||
|  |  | ||||||
|  |         class Animal(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = meta_settings | ||||||
|  |  | ||||||
|  |         class Fish(Animal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Guppy(Fish): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Mammal(Animal): | ||||||
|  |             meta = {"abstract": True} | ||||||
|  |  | ||||||
|  |         class Human(Mammal): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         for k, v in defaults.items(): | ||||||
|  |             for cls in [Animal, Fish, Guppy]: | ||||||
|  |                 assert cls._meta[k] == v | ||||||
|  |  | ||||||
|  |         assert "collection" not in Animal._meta | ||||||
|  |         assert "collection" not in Mammal._meta | ||||||
|  |  | ||||||
|  |         assert Animal._get_collection_name() is None | ||||||
|  |         assert Mammal._get_collection_name() is None | ||||||
|  |  | ||||||
|  |         assert Fish._get_collection_name() == "fish" | ||||||
|  |         assert Guppy._get_collection_name() == "fish" | ||||||
|  |         assert Human._get_collection_name() == "human" | ||||||
|  |  | ||||||
|  |         # ensure that a subclass of a non-abstract class can't be abstract | ||||||
|  |         with pytest.raises(ValueError): | ||||||
|  |  | ||||||
|  |             class EvilHuman(Human): | ||||||
|  |                 evil = BooleanField(default=True) | ||||||
|  |                 meta = {"abstract": True} | ||||||
|  |  | ||||||
|  |     def test_abstract_embedded_documents(self): | ||||||
|  |         # 789: EmbeddedDocument shouldn't inherit abstract | ||||||
|  |         class A(EmbeddedDocument): | ||||||
|  |             meta = {"abstract": True} | ||||||
|  |  | ||||||
|  |         class B(A): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         assert not B._meta["abstract"] | ||||||
|  |  | ||||||
    def test_inherited_collections(self):
        """Ensure that subclassed documents don't override parents'
        collections
        """

        class Drink(Document):
            name = StringField()
            meta = {"allow_inheritance": True}

        class Drinker(Document):
            drink = GenericReferenceField()

        try:
            # Escalate warnings to errors so the duplicate-collection
            # SyntaxWarning emitted for the subclass is catchable below.
            warnings.simplefilter("error")

            # NOTE: the misspelled class name is intentional -- this first
            # definition exists only to trigger the warning-as-error.
            class AcloholicDrink(Drink):
                meta = {"collection": "booze"}

        except SyntaxWarning:
            # Expected path: re-define the class with warnings suppressed
            # so the rest of the test can use it normally.
            warnings.simplefilter("ignore")

            class AlcoholicDrink(Drink):
                meta = {"collection": "booze"}

        else:
            raise AssertionError("SyntaxWarning should be triggered")

        # Restore the default warning filters for subsequent tests.
        warnings.resetwarnings()

        Drink.drop_collection()
        AlcoholicDrink.drop_collection()
        Drinker.drop_collection()

        red_bull = Drink(name="Red Bull")
        red_bull.save()

        programmer = Drinker(drink=red_bull)
        programmer.save()

        beer = AlcoholicDrink(name="Beer")
        beer.save()
        real_person = Drinker(drink=beer)
        real_person.save()

        # The generic references must dereference to the correct documents
        # even though parent and subclass use different collections.
        assert Drinker.objects[0].drink.name == red_bull.name
        assert Drinker.objects[1].drink.name == beer.name
|  |  | ||||||
|  |  | ||||||
# Allow running this test module directly via `python <file>`.
if __name__ == "__main__":
    unittest.main()
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,22 +1,14 @@ | |||||||
| import unittest | import unittest | ||||||
| import uuid | import uuid | ||||||
| 
 | 
 | ||||||
| from nose.plugins.skip import SkipTest |  | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
| from bson import ObjectId | from bson import ObjectId | ||||||
| 
 | 
 | ||||||
| import pymongo |  | ||||||
| 
 |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| 
 | from tests.utils import MongoDBTestCase | ||||||
| __all__ = ("TestJson",) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class TestJson(unittest.TestCase): | class TestJson(MongoDBTestCase): | ||||||
| 
 |  | ||||||
|     def setUp(self): |  | ||||||
|         connect(db='mongoenginetest') |  | ||||||
| 
 |  | ||||||
|     def test_json_names(self): |     def test_json_names(self): | ||||||
|         """ |         """ | ||||||
|         Going to test reported issue: |         Going to test reported issue: | ||||||
| @@ -25,22 +17,24 @@ class TestJson(unittest.TestCase): | |||||||
|         a to_json with the original class names and not the abreviated |         a to_json with the original class names and not the abreviated | ||||||
|         mongodb document keys |         mongodb document keys | ||||||
|         """ |         """ | ||||||
|  | 
 | ||||||
|         class Embedded(EmbeddedDocument): |         class Embedded(EmbeddedDocument): | ||||||
|             string = StringField(db_field='s') |             string = StringField(db_field="s") | ||||||
| 
 | 
 | ||||||
|         class Doc(Document): |         class Doc(Document): | ||||||
|             string = StringField(db_field='s') |             string = StringField(db_field="s") | ||||||
|             embedded = EmbeddedDocumentField(Embedded, db_field='e') |             embedded = EmbeddedDocumentField(Embedded, db_field="e") | ||||||
| 
 | 
 | ||||||
|         doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello")) |         doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello")) | ||||||
|         doc_json = doc.to_json(sort_keys=True, use_db_field=False, separators=(',', ':')) |         doc_json = doc.to_json( | ||||||
|  |             sort_keys=True, use_db_field=False, separators=(",", ":") | ||||||
|  |         ) | ||||||
| 
 | 
 | ||||||
|         expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}""" |         expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}""" | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(doc_json, expected_json) |         assert doc_json == expected_json | ||||||
| 
 | 
 | ||||||
|     def test_json_simple(self): |     def test_json_simple(self): | ||||||
| 
 |  | ||||||
|         class Embedded(EmbeddedDocument): |         class Embedded(EmbeddedDocument): | ||||||
|             string = StringField() |             string = StringField() | ||||||
| 
 | 
 | ||||||
| @@ -49,16 +43,18 @@ class TestJson(unittest.TestCase): | |||||||
|             embedded_field = EmbeddedDocumentField(Embedded) |             embedded_field = EmbeddedDocumentField(Embedded) | ||||||
| 
 | 
 | ||||||
|             def __eq__(self, other): |             def __eq__(self, other): | ||||||
|                 return (self.string == other.string and |                 return ( | ||||||
|                         self.embedded_field == other.embedded_field) |                     self.string == other.string | ||||||
|  |                     and self.embedded_field == other.embedded_field | ||||||
|  |                 ) | ||||||
| 
 | 
 | ||||||
|         doc = Doc(string="Hi", embedded_field=Embedded(string="Hi")) |         doc = Doc(string="Hi", embedded_field=Embedded(string="Hi")) | ||||||
| 
 | 
 | ||||||
|         doc_json = doc.to_json(sort_keys=True, separators=(',', ':')) |         doc_json = doc.to_json(sort_keys=True, separators=(",", ":")) | ||||||
|         expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}""" |         expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}""" | ||||||
|         self.assertEqual(doc_json, expected_json) |         assert doc_json == expected_json | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(doc, Doc.from_json(doc.to_json())) |         assert doc == Doc.from_json(doc.to_json()) | ||||||
| 
 | 
 | ||||||
|     def test_json_complex(self): |     def test_json_complex(self): | ||||||
|         class EmbeddedDoc(EmbeddedDocument): |         class EmbeddedDoc(EmbeddedDocument): | ||||||
| @@ -68,41 +64,43 @@ class TestJson(unittest.TestCase): | |||||||
|             pass |             pass | ||||||
| 
 | 
 | ||||||
|         class Doc(Document): |         class Doc(Document): | ||||||
|             string_field = StringField(default='1') |             string_field = StringField(default="1") | ||||||
|             int_field = IntField(default=1) |             int_field = IntField(default=1) | ||||||
|             float_field = FloatField(default=1.1) |             float_field = FloatField(default=1.1) | ||||||
|             boolean_field = BooleanField(default=True) |             boolean_field = BooleanField(default=True) | ||||||
|             datetime_field = DateTimeField(default=datetime.now) |             datetime_field = DateTimeField(default=datetime.now) | ||||||
|             embedded_document_field = EmbeddedDocumentField(EmbeddedDoc, |             embedded_document_field = EmbeddedDocumentField( | ||||||
|                                         default=lambda: EmbeddedDoc()) |                 EmbeddedDoc, default=lambda: EmbeddedDoc() | ||||||
|  |             ) | ||||||
|             list_field = ListField(default=lambda: [1, 2, 3]) |             list_field = ListField(default=lambda: [1, 2, 3]) | ||||||
|             dict_field = DictField(default=lambda: {"hello": "world"}) |             dict_field = DictField(default=lambda: {"hello": "world"}) | ||||||
|             objectid_field = ObjectIdField(default=ObjectId) |             objectid_field = ObjectIdField(default=ObjectId) | ||||||
|             reference_field = ReferenceField(Simple, default=lambda: |             reference_field = ReferenceField(Simple, default=lambda: Simple().save()) | ||||||
|                                                         Simple().save()) |  | ||||||
|             map_field = MapField(IntField(), default=lambda: {"simple": 1}) |             map_field = MapField(IntField(), default=lambda: {"simple": 1}) | ||||||
|             decimal_field = DecimalField(default=1.0) |             decimal_field = DecimalField(default=1.0) | ||||||
|             complex_datetime_field = ComplexDateTimeField(default=datetime.now) |             complex_datetime_field = ComplexDateTimeField(default=datetime.now) | ||||||
|             url_field = URLField(default="http://mongoengine.org") |             url_field = URLField(default="http://mongoengine.org") | ||||||
|             dynamic_field = DynamicField(default=1) |             dynamic_field = DynamicField(default=1) | ||||||
|             generic_reference_field = GenericReferenceField( |             generic_reference_field = GenericReferenceField( | ||||||
|                                             default=lambda: Simple().save()) |                 default=lambda: Simple().save() | ||||||
|             sorted_list_field = SortedListField(IntField(), |             ) | ||||||
|                                                 default=lambda: [1, 2, 3]) |             sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3]) | ||||||
|             email_field = EmailField(default="ross@example.com") |             email_field = EmailField(default="ross@example.com") | ||||||
|             geo_point_field = GeoPointField(default=lambda: [1, 2]) |             geo_point_field = GeoPointField(default=lambda: [1, 2]) | ||||||
|             sequence_field = SequenceField() |             sequence_field = SequenceField() | ||||||
|             uuid_field = UUIDField(default=uuid.uuid4) |             uuid_field = UUIDField(default=uuid.uuid4) | ||||||
|             generic_embedded_document_field = GenericEmbeddedDocumentField( |             generic_embedded_document_field = GenericEmbeddedDocumentField( | ||||||
|                                         default=lambda: EmbeddedDoc()) |                 default=lambda: EmbeddedDoc() | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|             def __eq__(self, other): |             def __eq__(self, other): | ||||||
|                 import json |                 import json | ||||||
|  | 
 | ||||||
|                 return json.loads(self.to_json()) == json.loads(other.to_json()) |                 return json.loads(self.to_json()) == json.loads(other.to_json()) | ||||||
| 
 | 
 | ||||||
|         doc = Doc() |         doc = Doc() | ||||||
|         self.assertEqual(doc, Doc.from_json(doc.to_json())) |         assert doc == Doc.from_json(doc.to_json()) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| if __name__ == '__main__': | if __name__ == "__main__": | ||||||
|     unittest.main() |     unittest.main() | ||||||
| @@ -2,55 +2,60 @@ | |||||||
| import unittest | import unittest | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
| 
 | 
 | ||||||
|  | import pytest | ||||||
|  | 
 | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| 
 | from tests.utils import MongoDBTestCase | ||||||
| __all__ = ("ValidatorErrorTest",) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class ValidatorErrorTest(unittest.TestCase): | class TestValidatorError(MongoDBTestCase): | ||||||
| 
 |  | ||||||
|     def setUp(self): |  | ||||||
|         connect(db='mongoenginetest') |  | ||||||
| 
 |  | ||||||
|     def test_to_dict(self): |     def test_to_dict(self): | ||||||
|         """Ensure a ValidationError handles error to_dict correctly. |         """Ensure a ValidationError handles error to_dict correctly. | ||||||
|         """ |         """ | ||||||
|         error = ValidationError('root') |         error = ValidationError("root") | ||||||
|         self.assertEqual(error.to_dict(), {}) |         assert error.to_dict() == {} | ||||||
| 
 | 
 | ||||||
|         # 1st level error schema |         # 1st level error schema | ||||||
|         error.errors = {'1st': ValidationError('bad 1st'), } |         error.errors = {"1st": ValidationError("bad 1st")} | ||||||
|         self.assertIn('1st', error.to_dict()) |         assert "1st" in error.to_dict() | ||||||
|         self.assertEqual(error.to_dict()['1st'], 'bad 1st') |         assert error.to_dict()["1st"] == "bad 1st" | ||||||
| 
 | 
 | ||||||
|         # 2nd level error schema |         # 2nd level error schema | ||||||
|         error.errors = {'1st': ValidationError('bad 1st', errors={ |         error.errors = { | ||||||
|             '2nd': ValidationError('bad 2nd'), |             "1st": ValidationError( | ||||||
|         })} |                 "bad 1st", errors={"2nd": ValidationError("bad 2nd")} | ||||||
|         self.assertIn('1st', error.to_dict()) |             ) | ||||||
|         self.assertIsInstance(error.to_dict()['1st'], dict) |         } | ||||||
|         self.assertIn('2nd', error.to_dict()['1st']) |         assert "1st" in error.to_dict() | ||||||
|         self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd') |         assert isinstance(error.to_dict()["1st"], dict) | ||||||
|  |         assert "2nd" in error.to_dict()["1st"] | ||||||
|  |         assert error.to_dict()["1st"]["2nd"] == "bad 2nd" | ||||||
| 
 | 
 | ||||||
|         # moar levels |         # moar levels | ||||||
|         error.errors = {'1st': ValidationError('bad 1st', errors={ |         error.errors = { | ||||||
|             '2nd': ValidationError('bad 2nd', errors={ |             "1st": ValidationError( | ||||||
|                 '3rd': ValidationError('bad 3rd', errors={ |                 "bad 1st", | ||||||
|                     '4th': ValidationError('Inception'), |                 errors={ | ||||||
|                 }), |                     "2nd": ValidationError( | ||||||
|             }), |                         "bad 2nd", | ||||||
|         })} |                         errors={ | ||||||
|         self.assertIn('1st', error.to_dict()) |                             "3rd": ValidationError( | ||||||
|         self.assertIn('2nd', error.to_dict()['1st']) |                                 "bad 3rd", errors={"4th": ValidationError("Inception")} | ||||||
|         self.assertIn('3rd', error.to_dict()['1st']['2nd']) |                             ) | ||||||
|         self.assertIn('4th', error.to_dict()['1st']['2nd']['3rd']) |                         }, | ||||||
|         self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'], |                     ) | ||||||
|                          'Inception') |                 }, | ||||||
|  |             ) | ||||||
|  |         } | ||||||
|  |         assert "1st" in error.to_dict() | ||||||
|  |         assert "2nd" in error.to_dict()["1st"] | ||||||
|  |         assert "3rd" in error.to_dict()["1st"]["2nd"] | ||||||
|  |         assert "4th" in error.to_dict()["1st"]["2nd"]["3rd"] | ||||||
|  |         assert error.to_dict()["1st"]["2nd"]["3rd"]["4th"] == "Inception" | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])") |         assert error.message == "root(2nd.3rd.4th.Inception: ['1st'])" | ||||||
| 
 | 
 | ||||||
|     def test_model_validation(self): |     def test_model_validation(self): | ||||||
| 
 |  | ||||||
|         class User(Document): |         class User(Document): | ||||||
|             username = StringField(primary_key=True) |             username = StringField(primary_key=True) | ||||||
|             name = StringField(required=True) |             name = StringField(required=True) | ||||||
| @@ -58,67 +63,69 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|         try: |         try: | ||||||
|             User().validate() |             User().validate() | ||||||
|         except ValidationError as e: |         except ValidationError as e: | ||||||
|             self.assertIn("User:None", e.message) |             assert "User:None" in e.message | ||||||
|             self.assertEqual(e.to_dict(), { |             assert e.to_dict() == { | ||||||
|                 'username': 'Field is required', |                 "username": "Field is required", | ||||||
|                 'name': 'Field is required'}) |                 "name": "Field is required", | ||||||
|  |             } | ||||||
| 
 | 
 | ||||||
|         user = User(username="RossC0", name="Ross").save() |         user = User(username="RossC0", name="Ross").save() | ||||||
|         user.name = None |         user.name = None | ||||||
|         try: |         try: | ||||||
|             user.save() |             user.save() | ||||||
|         except ValidationError as e: |         except ValidationError as e: | ||||||
|             self.assertIn("User:RossC0", e.message) |             assert "User:RossC0" in e.message | ||||||
|             self.assertEqual(e.to_dict(), { |             assert e.to_dict() == {"name": "Field is required"} | ||||||
|                 'name': 'Field is required'}) |  | ||||||
| 
 | 
 | ||||||
|     def test_fields_rewrite(self): |     def test_fields_rewrite(self): | ||||||
|         class BasePerson(Document): |         class BasePerson(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             age = IntField() |             age = IntField() | ||||||
|             meta = {'abstract': True} |             meta = {"abstract": True} | ||||||
| 
 | 
 | ||||||
|         class Person(BasePerson): |         class Person(BasePerson): | ||||||
|             name = StringField(required=True) |             name = StringField(required=True) | ||||||
| 
 | 
 | ||||||
|         p = Person(age=15) |         p = Person(age=15) | ||||||
|         self.assertRaises(ValidationError, p.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             p.validate() | ||||||
| 
 | 
 | ||||||
|     def test_embedded_document_validation(self): |     def test_embedded_document_validation(self): | ||||||
|         """Ensure that embedded documents may be validated. |         """Ensure that embedded documents may be validated. | ||||||
|         """ |         """ | ||||||
|  | 
 | ||||||
|         class Comment(EmbeddedDocument): |         class Comment(EmbeddedDocument): | ||||||
|             date = DateTimeField() |             date = DateTimeField() | ||||||
|             content = StringField(required=True) |             content = StringField(required=True) | ||||||
| 
 | 
 | ||||||
|         comment = Comment() |         comment = Comment() | ||||||
|         self.assertRaises(ValidationError, comment.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             comment.validate() | ||||||
| 
 | 
 | ||||||
|         comment.content = 'test' |         comment.content = "test" | ||||||
|         comment.validate() |         comment.validate() | ||||||
| 
 | 
 | ||||||
|         comment.date = 4 |         comment.date = 4 | ||||||
|         self.assertRaises(ValidationError, comment.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             comment.validate() | ||||||
| 
 | 
 | ||||||
|         comment.date = datetime.now() |         comment.date = datetime.now() | ||||||
|         comment.validate() |         comment.validate() | ||||||
|         self.assertEqual(comment._instance, None) |         assert comment._instance is None | ||||||
| 
 | 
 | ||||||
|     def test_embedded_db_field_validate(self): |     def test_embedded_db_field_validate(self): | ||||||
| 
 |  | ||||||
|         class SubDoc(EmbeddedDocument): |         class SubDoc(EmbeddedDocument): | ||||||
|             val = IntField(required=True) |             val = IntField(required=True) | ||||||
| 
 | 
 | ||||||
|         class Doc(Document): |         class Doc(Document): | ||||||
|             id = StringField(primary_key=True) |             id = StringField(primary_key=True) | ||||||
|             e = EmbeddedDocumentField(SubDoc, db_field='eb') |             e = EmbeddedDocumentField(SubDoc, db_field="eb") | ||||||
| 
 | 
 | ||||||
|         try: |         try: | ||||||
|             Doc(id="bad").validate() |             Doc(id="bad").validate() | ||||||
|         except ValidationError as e: |         except ValidationError as e: | ||||||
|             self.assertIn("SubDoc:None", e.message) |             assert "SubDoc:None" in e.message | ||||||
|             self.assertEqual(e.to_dict(), { |             assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}} | ||||||
|                 "e": {'val': 'OK could not be converted to int'}}) |  | ||||||
| 
 | 
 | ||||||
|         Doc.drop_collection() |         Doc.drop_collection() | ||||||
| 
 | 
 | ||||||
| @@ -126,25 +133,23 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         doc = Doc.objects.first() |         doc = Doc.objects.first() | ||||||
|         keys = doc._data.keys() |         keys = doc._data.keys() | ||||||
|         self.assertEqual(2, len(keys)) |         assert 2 == len(keys) | ||||||
|         self.assertIn('e', keys) |         assert "e" in keys | ||||||
|         self.assertIn('id', keys) |         assert "id" in keys | ||||||
| 
 | 
 | ||||||
|         doc.e.val = "OK" |         doc.e.val = "OK" | ||||||
|         try: |         try: | ||||||
|             doc.save() |             doc.save() | ||||||
|         except ValidationError as e: |         except ValidationError as e: | ||||||
|             self.assertIn("Doc:test", e.message) |             assert "Doc:test" in e.message | ||||||
|             self.assertEqual(e.to_dict(), { |             assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}} | ||||||
|                 "e": {'val': 'OK could not be converted to int'}}) |  | ||||||
| 
 | 
 | ||||||
|     def test_embedded_weakref(self): |     def test_embedded_weakref(self): | ||||||
| 
 |  | ||||||
|         class SubDoc(EmbeddedDocument): |         class SubDoc(EmbeddedDocument): | ||||||
|             val = IntField(required=True) |             val = IntField(required=True) | ||||||
| 
 | 
 | ||||||
|         class Doc(Document): |         class Doc(Document): | ||||||
|             e = EmbeddedDocumentField(SubDoc, db_field='eb') |             e = EmbeddedDocumentField(SubDoc, db_field="eb") | ||||||
| 
 | 
 | ||||||
|         Doc.drop_collection() |         Doc.drop_collection() | ||||||
| 
 | 
 | ||||||
| @@ -153,23 +158,26 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         s = SubDoc() |         s = SubDoc() | ||||||
| 
 | 
 | ||||||
|         self.assertRaises(ValidationError, s.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             s.validate() | ||||||
| 
 | 
 | ||||||
|         d1.e = s |         d1.e = s | ||||||
|         d2.e = s |         d2.e = s | ||||||
| 
 | 
 | ||||||
|         del d1 |         del d1 | ||||||
| 
 | 
 | ||||||
|         self.assertRaises(ValidationError, d2.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             d2.validate() | ||||||
| 
 | 
 | ||||||
|     def test_parent_reference_in_child_document(self): |     def test_parent_reference_in_child_document(self): | ||||||
|         """ |         """ | ||||||
|         Test to ensure a ReferenceField can store a reference to a parent |         Test to ensure a ReferenceField can store a reference to a parent | ||||||
|         class when inherited. Issue #954. |         class when inherited. Issue #954. | ||||||
|         """ |         """ | ||||||
|  | 
 | ||||||
|         class Parent(Document): |         class Parent(Document): | ||||||
|             meta = {'allow_inheritance': True} |             meta = {"allow_inheritance": True} | ||||||
|             reference = ReferenceField('self') |             reference = ReferenceField("self") | ||||||
| 
 | 
 | ||||||
|         class Child(Parent): |         class Child(Parent): | ||||||
|             pass |             pass | ||||||
| @@ -190,9 +198,10 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|         Test to ensure a ReferenceField can store a reference to a parent |         Test to ensure a ReferenceField can store a reference to a parent | ||||||
|         class when inherited and when set via attribute. Issue #954. |         class when inherited and when set via attribute. Issue #954. | ||||||
|         """ |         """ | ||||||
|  | 
 | ||||||
|         class Parent(Document): |         class Parent(Document): | ||||||
|             meta = {'allow_inheritance': True} |             meta = {"allow_inheritance": True} | ||||||
|             reference = ReferenceField('self') |             reference = ReferenceField("self") | ||||||
| 
 | 
 | ||||||
|         class Child(Parent): |         class Child(Parent): | ||||||
|             pass |             pass | ||||||
| @@ -210,5 +219,5 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|             self.fail("ValidationError raised: %s" % e.message) |             self.fail("ValidationError raised: %s" % e.message) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| if __name__ == '__main__': | if __name__ == "__main__": | ||||||
|     unittest.main() |     unittest.main() | ||||||
| @@ -1,3 +0,0 @@ | |||||||
| from .fields import * |  | ||||||
| from .file_tests import * |  | ||||||
| from .geo import * |  | ||||||
|   | |||||||
| @@ -1,27 +1,28 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| import uuid | import uuid | ||||||
|  |  | ||||||
| from nose.plugins.skip import SkipTest |  | ||||||
| import six |  | ||||||
|  |  | ||||||
| from bson import Binary | from bson import Binary | ||||||
|  | import pytest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from tests.utils import MongoDBTestCase | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
| BIN_VALUE = six.b('\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5') | BIN_VALUE = "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5".encode( | ||||||
|  |     "latin-1" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestBinaryField(MongoDBTestCase): | class TestBinaryField(MongoDBTestCase): | ||||||
|     def test_binary_fields(self): |     def test_binary_fields(self): | ||||||
|         """Ensure that binary fields can be stored and retrieved. |         """Ensure that binary fields can be stored and retrieved. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Attachment(Document): |         class Attachment(Document): | ||||||
|             content_type = StringField() |             content_type = StringField() | ||||||
|             blob = BinaryField() |             blob = BinaryField() | ||||||
|  |  | ||||||
|         BLOB = six.b('\xe6\x00\xc4\xff\x07') |         BLOB = "\xe6\x00\xc4\xff\x07".encode("latin-1") | ||||||
|         MIME_TYPE = 'application/octet-stream' |         MIME_TYPE = "application/octet-stream" | ||||||
|  |  | ||||||
|         Attachment.drop_collection() |         Attachment.drop_collection() | ||||||
|  |  | ||||||
| @@ -29,12 +30,13 @@ class TestBinaryField(MongoDBTestCase): | |||||||
|         attachment.save() |         attachment.save() | ||||||
|  |  | ||||||
|         attachment_1 = Attachment.objects().first() |         attachment_1 = Attachment.objects().first() | ||||||
|         self.assertEqual(MIME_TYPE, attachment_1.content_type) |         assert MIME_TYPE == attachment_1.content_type | ||||||
|         self.assertEqual(BLOB, six.binary_type(attachment_1.blob)) |         assert BLOB == bytes(attachment_1.blob) | ||||||
|  |  | ||||||
|     def test_validation_succeeds(self): |     def test_validation_succeeds(self): | ||||||
|         """Ensure that valid values can be assigned to binary fields. |         """Ensure that valid values can be assigned to binary fields. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class AttachmentRequired(Document): |         class AttachmentRequired(Document): | ||||||
|             blob = BinaryField(required=True) |             blob = BinaryField(required=True) | ||||||
|  |  | ||||||
| @@ -42,13 +44,15 @@ class TestBinaryField(MongoDBTestCase): | |||||||
|             blob = BinaryField(max_bytes=4) |             blob = BinaryField(max_bytes=4) | ||||||
|  |  | ||||||
|         attachment_required = AttachmentRequired() |         attachment_required = AttachmentRequired() | ||||||
|         self.assertRaises(ValidationError, attachment_required.validate) |         with pytest.raises(ValidationError): | ||||||
|         attachment_required.blob = Binary(six.b('\xe6\x00\xc4\xff\x07')) |             attachment_required.validate() | ||||||
|  |         attachment_required.blob = Binary("\xe6\x00\xc4\xff\x07".encode("latin-1")) | ||||||
|         attachment_required.validate() |         attachment_required.validate() | ||||||
|  |  | ||||||
|         _5_BYTES = six.b('\xe6\x00\xc4\xff\x07') |         _5_BYTES = "\xe6\x00\xc4\xff\x07".encode("latin-1") | ||||||
|         _4_BYTES = six.b('\xe6\x00\xc4\xff') |         _4_BYTES = "\xe6\x00\xc4\xff".encode("latin-1") | ||||||
|         self.assertRaises(ValidationError, AttachmentSizeLimit(blob=_5_BYTES).validate) |         with pytest.raises(ValidationError): | ||||||
|  |             AttachmentSizeLimit(blob=_5_BYTES).validate() | ||||||
|         AttachmentSizeLimit(blob=_4_BYTES).validate() |         AttachmentSizeLimit(blob=_4_BYTES).validate() | ||||||
|  |  | ||||||
|     def test_validation_fails(self): |     def test_validation_fails(self): | ||||||
| @@ -57,8 +61,9 @@ class TestBinaryField(MongoDBTestCase): | |||||||
|         class Attachment(Document): |         class Attachment(Document): | ||||||
|             blob = BinaryField() |             blob = BinaryField() | ||||||
|  |  | ||||||
|         for invalid_data in (2, u'Im_a_unicode', ['some_str']): |         for invalid_data in (2, u"Im_a_unicode", ["some_str"]): | ||||||
|             self.assertRaises(ValidationError, Attachment(blob=invalid_data).validate) |             with pytest.raises(ValidationError): | ||||||
|  |                 Attachment(blob=invalid_data).validate() | ||||||
|  |  | ||||||
|     def test__primary(self): |     def test__primary(self): | ||||||
|         class Attachment(Document): |         class Attachment(Document): | ||||||
| @@ -67,23 +72,21 @@ class TestBinaryField(MongoDBTestCase): | |||||||
|         Attachment.drop_collection() |         Attachment.drop_collection() | ||||||
|         binary_id = uuid.uuid4().bytes |         binary_id = uuid.uuid4().bytes | ||||||
|         att = Attachment(id=binary_id).save() |         att = Attachment(id=binary_id).save() | ||||||
|         self.assertEqual(1, Attachment.objects.count()) |         assert 1 == Attachment.objects.count() | ||||||
|         self.assertEqual(1, Attachment.objects.filter(id=att.id).count()) |         assert 1 == Attachment.objects.filter(id=att.id).count() | ||||||
|         att.delete() |         att.delete() | ||||||
|         self.assertEqual(0, Attachment.objects.count()) |         assert 0 == Attachment.objects.count() | ||||||
|  |  | ||||||
|     def test_primary_filter_by_binary_pk_as_str(self): |     def test_primary_filter_by_binary_pk_as_str(self): | ||||||
|         raise SkipTest("Querying by id as string is not currently supported") |  | ||||||
|  |  | ||||||
|         class Attachment(Document): |         class Attachment(Document): | ||||||
|             id = BinaryField(primary_key=True) |             id = BinaryField(primary_key=True) | ||||||
|  |  | ||||||
|         Attachment.drop_collection() |         Attachment.drop_collection() | ||||||
|         binary_id = uuid.uuid4().bytes |         binary_id = uuid.uuid4().bytes | ||||||
|         att = Attachment(id=binary_id).save() |         att = Attachment(id=binary_id).save() | ||||||
|         self.assertEqual(1, Attachment.objects.filter(id=binary_id).count()) |         assert 1 == Attachment.objects.filter(id=binary_id).count() | ||||||
|         att.delete() |         att.delete() | ||||||
|         self.assertEqual(0, Attachment.objects.count()) |         assert 0 == Attachment.objects.count() | ||||||
|  |  | ||||||
|     def test_match_querying_with_bytes(self): |     def test_match_querying_with_bytes(self): | ||||||
|         class MyDocument(Document): |         class MyDocument(Document): | ||||||
| @@ -93,7 +96,7 @@ class TestBinaryField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         doc = MyDocument(bin_field=BIN_VALUE).save() |         doc = MyDocument(bin_field=BIN_VALUE).save() | ||||||
|         matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first() |         matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first() | ||||||
|         self.assertEqual(matched_doc.id, doc.id) |         assert matched_doc.id == doc.id | ||||||
|  |  | ||||||
|     def test_match_querying_with_binary(self): |     def test_match_querying_with_binary(self): | ||||||
|         class MyDocument(Document): |         class MyDocument(Document): | ||||||
| @@ -104,40 +107,37 @@ class TestBinaryField(MongoDBTestCase): | |||||||
|         doc = MyDocument(bin_field=BIN_VALUE).save() |         doc = MyDocument(bin_field=BIN_VALUE).save() | ||||||
|  |  | ||||||
|         matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first() |         matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first() | ||||||
|         self.assertEqual(matched_doc.id, doc.id) |         assert matched_doc.id == doc.id | ||||||
|  |  | ||||||
|     def test_modify_operation__set(self): |     def test_modify_operation__set(self): | ||||||
|         """Ensures no regression of bug #1127""" |         """Ensures no regression of bug #1127""" | ||||||
|  |  | ||||||
|         class MyDocument(Document): |         class MyDocument(Document): | ||||||
|             some_field = StringField() |             some_field = StringField() | ||||||
|             bin_field = BinaryField() |             bin_field = BinaryField() | ||||||
|  |  | ||||||
|         MyDocument.drop_collection() |         MyDocument.drop_collection() | ||||||
|  |  | ||||||
|         doc = MyDocument.objects(some_field='test').modify( |         doc = MyDocument.objects(some_field="test").modify( | ||||||
|             upsert=True, new=True, |             upsert=True, new=True, set__bin_field=BIN_VALUE | ||||||
|             set__bin_field=BIN_VALUE |  | ||||||
|         ) |         ) | ||||||
|         self.assertEqual(doc.some_field, 'test') |         assert doc.some_field == "test" | ||||||
|         if six.PY3: |         assert doc.bin_field == BIN_VALUE | ||||||
|             self.assertEqual(doc.bin_field, BIN_VALUE) |  | ||||||
|         else: |  | ||||||
|             self.assertEqual(doc.bin_field, Binary(BIN_VALUE)) |  | ||||||
|  |  | ||||||
|     def test_update_one(self): |     def test_update_one(self): | ||||||
|         """Ensures no regression of bug #1127""" |         """Ensures no regression of bug #1127""" | ||||||
|  |  | ||||||
|         class MyDocument(Document): |         class MyDocument(Document): | ||||||
|             bin_field = BinaryField() |             bin_field = BinaryField() | ||||||
|  |  | ||||||
|         MyDocument.drop_collection() |         MyDocument.drop_collection() | ||||||
|  |  | ||||||
|         bin_data = six.b('\xe6\x00\xc4\xff\x07') |         bin_data = "\xe6\x00\xc4\xff\x07".encode("latin-1") | ||||||
|         doc = MyDocument(bin_field=bin_data).save() |         doc = MyDocument(bin_field=bin_data).save() | ||||||
|  |  | ||||||
|         n_updated = MyDocument.objects(bin_field=bin_data).update_one(bin_field=BIN_VALUE) |         n_updated = MyDocument.objects(bin_field=bin_data).update_one( | ||||||
|         self.assertEqual(n_updated, 1) |             bin_field=BIN_VALUE | ||||||
|  |         ) | ||||||
|  |         assert n_updated == 1 | ||||||
|         fetched = MyDocument.objects.with_id(doc.id) |         fetched = MyDocument.objects.with_id(doc.id) | ||||||
|         if six.PY3: |         assert fetched.bin_field == BIN_VALUE | ||||||
|             self.assertEqual(fetched.bin_field, BIN_VALUE) |  | ||||||
|         else: |  | ||||||
|             self.assertEqual(fetched.bin_field, Binary(BIN_VALUE)) |  | ||||||
|   | |||||||
| @@ -1,6 +1,7 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| from mongoengine import * | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
| from tests.utils import MongoDBTestCase, get_as_pymongo | from tests.utils import MongoDBTestCase, get_as_pymongo | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -11,15 +12,13 @@ class TestBooleanField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         person = Person(admin=True) |         person = Person(admin=True) | ||||||
|         person.save() |         person.save() | ||||||
|         self.assertEqual( |         assert get_as_pymongo(person) == {"_id": person.id, "admin": True} | ||||||
|             get_as_pymongo(person), |  | ||||||
|             {'_id': person.id, |  | ||||||
|              'admin': True}) |  | ||||||
|  |  | ||||||
|     def test_validation(self): |     def test_validation(self): | ||||||
|         """Ensure that invalid values cannot be assigned to boolean |         """Ensure that invalid values cannot be assigned to boolean | ||||||
|         fields. |         fields. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             admin = BooleanField() |             admin = BooleanField() | ||||||
|  |  | ||||||
| @@ -28,22 +27,26 @@ class TestBooleanField(MongoDBTestCase): | |||||||
|         person.validate() |         person.validate() | ||||||
|  |  | ||||||
|         person.admin = 2 |         person.admin = 2 | ||||||
|         self.assertRaises(ValidationError, person.validate) |         with pytest.raises(ValidationError): | ||||||
|         person.admin = 'Yes' |             person.validate() | ||||||
|         self.assertRaises(ValidationError, person.validate) |         person.admin = "Yes" | ||||||
|         person.admin = 'False' |         with pytest.raises(ValidationError): | ||||||
|         self.assertRaises(ValidationError, person.validate) |             person.validate() | ||||||
|  |         person.admin = "False" | ||||||
|  |         with pytest.raises(ValidationError): | ||||||
|  |             person.validate() | ||||||
|  |  | ||||||
|     def test_weirdness_constructor(self): |     def test_weirdness_constructor(self): | ||||||
|         """When attribute is set in contructor, it gets cast into a bool |         """When attribute is set in contructor, it gets cast into a bool | ||||||
|         which causes some weird behavior. We dont necessarily want to maintain this behavior |         which causes some weird behavior. We dont necessarily want to maintain this behavior | ||||||
|         but its a known issue |         but its a known issue | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             admin = BooleanField() |             admin = BooleanField() | ||||||
|  |  | ||||||
|         new_person = Person(admin='False') |         new_person = Person(admin="False") | ||||||
|         self.assertTrue(new_person.admin) |         assert new_person.admin | ||||||
|  |  | ||||||
|         new_person = Person(admin='0') |         new_person = Person(admin="0") | ||||||
|         self.assertTrue(new_person.admin) |         assert new_person.admin | ||||||
|   | |||||||
| @@ -1,18 +1,19 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| from decimal import Decimal | from decimal import Decimal | ||||||
|  |  | ||||||
| from mongoengine import * | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
| from tests.utils import MongoDBTestCase | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestCachedReferenceField(MongoDBTestCase): | class TestCachedReferenceField(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def test_get_and_save(self): |     def test_get_and_save(self): | ||||||
|         """ |         """ | ||||||
|         Tests #1047: CachedReferenceField creates DBRefs on to_python, |         Tests #1047: CachedReferenceField creates DBRefs on to_python, | ||||||
|         but can't save them on to_mongo. |         but can't save them on to_mongo. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             tag = StringField() |             tag = StringField() | ||||||
| @@ -24,10 +25,11 @@ class TestCachedReferenceField(MongoDBTestCase): | |||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         Ocorrence.drop_collection() |         Ocorrence.drop_collection() | ||||||
|  |  | ||||||
|         Ocorrence(person="testte", |         Ocorrence( | ||||||
|                   animal=Animal(name="Leopard", tag="heavy").save()).save() |             person="testte", animal=Animal(name="Leopard", tag="heavy").save() | ||||||
|  |         ).save() | ||||||
|         p = Ocorrence.objects.get() |         p = Ocorrence.objects.get() | ||||||
|         p.person = 'new_testte' |         p.person = "new_testte" | ||||||
|         p.save() |         p.save() | ||||||
|  |  | ||||||
|     def test_general_things(self): |     def test_general_things(self): | ||||||
| @@ -37,8 +39,7 @@ class TestCachedReferenceField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         class Ocorrence(Document): |         class Ocorrence(Document): | ||||||
|             person = StringField() |             person = StringField() | ||||||
|             animal = CachedReferenceField( |             animal = CachedReferenceField(Animal, fields=["tag"]) | ||||||
|                 Animal, fields=['tag']) |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         Ocorrence.drop_collection() |         Ocorrence.drop_collection() | ||||||
| @@ -46,30 +47,29 @@ class TestCachedReferenceField(MongoDBTestCase): | |||||||
|         a = Animal(name="Leopard", tag="heavy") |         a = Animal(name="Leopard", tag="heavy") | ||||||
|         a.save() |         a.save() | ||||||
|  |  | ||||||
|         self.assertEqual(Animal._cached_reference_fields, [Ocorrence.animal]) |         assert Animal._cached_reference_fields == [Ocorrence.animal] | ||||||
|         o = Ocorrence(person="teste", animal=a) |         o = Ocorrence(person="teste", animal=a) | ||||||
|         o.save() |         o.save() | ||||||
|  |  | ||||||
|         p = Ocorrence(person="Wilson") |         p = Ocorrence(person="Wilson") | ||||||
|         p.save() |         p.save() | ||||||
|  |  | ||||||
|         self.assertEqual(Ocorrence.objects(animal=None).count(), 1) |         assert Ocorrence.objects(animal=None).count() == 1 | ||||||
|  |  | ||||||
|         self.assertEqual( |         assert a.to_mongo(fields=["tag"]) == {"tag": "heavy", "_id": a.pk} | ||||||
|             a.to_mongo(fields=['tag']), {'tag': 'heavy', "_id": a.pk}) |  | ||||||
|  |  | ||||||
|         self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') |         assert o.to_mongo()["animal"]["tag"] == "heavy" | ||||||
|  |  | ||||||
|         # counts |         # counts | ||||||
|         Ocorrence(person="teste 2").save() |         Ocorrence(person="teste 2").save() | ||||||
|         Ocorrence(person="teste 3").save() |         Ocorrence(person="teste 3").save() | ||||||
|  |  | ||||||
|         count = Ocorrence.objects(animal__tag='heavy').count() |         count = Ocorrence.objects(animal__tag="heavy").count() | ||||||
|         self.assertEqual(count, 1) |         assert count == 1 | ||||||
|  |  | ||||||
|         ocorrence = Ocorrence.objects(animal__tag='heavy').first() |         ocorrence = Ocorrence.objects(animal__tag="heavy").first() | ||||||
|         self.assertEqual(ocorrence.person, "teste") |         assert ocorrence.person == "teste" | ||||||
|         self.assertIsInstance(ocorrence.animal, Animal) |         assert isinstance(ocorrence.animal, Animal) | ||||||
|  |  | ||||||
|     def test_with_decimal(self): |     def test_with_decimal(self): | ||||||
|         class PersonAuto(Document): |         class PersonAuto(Document): | ||||||
| @@ -78,28 +78,22 @@ class TestCachedReferenceField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         class SocialTest(Document): |         class SocialTest(Document): | ||||||
|             group = StringField() |             group = StringField() | ||||||
|             person = CachedReferenceField( |             person = CachedReferenceField(PersonAuto, fields=("salary",)) | ||||||
|                 PersonAuto, |  | ||||||
|                 fields=('salary',)) |  | ||||||
|  |  | ||||||
|         PersonAuto.drop_collection() |         PersonAuto.drop_collection() | ||||||
|         SocialTest.drop_collection() |         SocialTest.drop_collection() | ||||||
|  |  | ||||||
|         p = PersonAuto(name="Alberto", salary=Decimal('7000.00')) |         p = PersonAuto(name="Alberto", salary=Decimal("7000.00")) | ||||||
|         p.save() |         p.save() | ||||||
|  |  | ||||||
|         s = SocialTest(group="dev", person=p) |         s = SocialTest(group="dev", person=p) | ||||||
|         s.save() |         s.save() | ||||||
|  |  | ||||||
|         self.assertEqual( |         assert SocialTest.objects._collection.find_one({"person.salary": 7000.00}) == { | ||||||
|             SocialTest.objects._collection.find_one({'person.salary': 7000.00}), { |             "_id": s.pk, | ||||||
|                 '_id': s.pk, |             "group": s.group, | ||||||
|                 'group': s.group, |             "person": {"_id": p.pk, "salary": 7000.00}, | ||||||
|                 'person': { |         } | ||||||
|                     '_id': p.pk, |  | ||||||
|                     'salary': 7000.00 |  | ||||||
|                 } |  | ||||||
|             }) |  | ||||||
|  |  | ||||||
|     def test_cached_reference_field_reference(self): |     def test_cached_reference_field_reference(self): | ||||||
|         class Group(Document): |         class Group(Document): | ||||||
| @@ -111,17 +105,14 @@ class TestCachedReferenceField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         class SocialData(Document): |         class SocialData(Document): | ||||||
|             obs = StringField() |             obs = StringField() | ||||||
|             tags = ListField( |             tags = ListField(StringField()) | ||||||
|                 StringField()) |             person = CachedReferenceField(Person, fields=("group",)) | ||||||
|             person = CachedReferenceField( |  | ||||||
|                 Person, |  | ||||||
|                 fields=('group',)) |  | ||||||
|  |  | ||||||
|         Group.drop_collection() |         Group.drop_collection() | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|         SocialData.drop_collection() |         SocialData.drop_collection() | ||||||
|  |  | ||||||
|         g1 = Group(name='dev') |         g1 = Group(name="dev") | ||||||
|         g1.save() |         g1.save() | ||||||
|  |  | ||||||
|         g2 = Group(name="designers") |         g2 = Group(name="designers") | ||||||
| @@ -136,25 +127,21 @@ class TestCachedReferenceField(MongoDBTestCase): | |||||||
|         p3 = Person(name="Afro design", group=g2) |         p3 = Person(name="Afro design", group=g2) | ||||||
|         p3.save() |         p3.save() | ||||||
|  |  | ||||||
|         s1 = SocialData(obs="testing 123", person=p1, tags=['tag1', 'tag2']) |         s1 = SocialData(obs="testing 123", person=p1, tags=["tag1", "tag2"]) | ||||||
|         s1.save() |         s1.save() | ||||||
|  |  | ||||||
|         s2 = SocialData(obs="testing 321", person=p3, tags=['tag3', 'tag4']) |         s2 = SocialData(obs="testing 321", person=p3, tags=["tag3", "tag4"]) | ||||||
|         s2.save() |         s2.save() | ||||||
|  |  | ||||||
|         self.assertEqual(SocialData.objects._collection.find_one( |         assert SocialData.objects._collection.find_one({"tags": "tag2"}) == { | ||||||
|             {'tags': 'tag2'}), { |             "_id": s1.pk, | ||||||
|                 '_id': s1.pk, |             "obs": "testing 123", | ||||||
|                 'obs': 'testing 123', |             "tags": ["tag1", "tag2"], | ||||||
|                 'tags': ['tag1', 'tag2'], |             "person": {"_id": p1.pk, "group": g1.pk}, | ||||||
|                 'person': { |         } | ||||||
|                     '_id': p1.pk, |  | ||||||
|                     'group': g1.pk |  | ||||||
|                 } |  | ||||||
|         }) |  | ||||||
|  |  | ||||||
|         self.assertEqual(SocialData.objects(person__group=g2).count(), 1) |         assert SocialData.objects(person__group=g2).count() == 1 | ||||||
|         self.assertEqual(SocialData.objects(person__group=g2).first(), s2) |         assert SocialData.objects(person__group=g2).first() == s2 | ||||||
|  |  | ||||||
|     def test_cached_reference_field_push_with_fields(self): |     def test_cached_reference_field_push_with_fields(self): | ||||||
|         class Product(Document): |         class Product(Document): | ||||||
| @@ -163,185 +150,136 @@ class TestCachedReferenceField(MongoDBTestCase): | |||||||
|         Product.drop_collection() |         Product.drop_collection() | ||||||
|  |  | ||||||
|         class Basket(Document): |         class Basket(Document): | ||||||
|             products = ListField(CachedReferenceField(Product, fields=['name'])) |             products = ListField(CachedReferenceField(Product, fields=["name"])) | ||||||
|  |  | ||||||
|         Basket.drop_collection() |         Basket.drop_collection() | ||||||
|         product1 = Product(name='abc').save() |         product1 = Product(name="abc").save() | ||||||
|         product2 = Product(name='def').save() |         product2 = Product(name="def").save() | ||||||
|         basket = Basket(products=[product1]).save() |         basket = Basket(products=[product1]).save() | ||||||
|         self.assertEqual( |         assert Basket.objects._collection.find_one() == { | ||||||
|             Basket.objects._collection.find_one(), |             "_id": basket.pk, | ||||||
|             { |             "products": [{"_id": product1.pk, "name": product1.name}], | ||||||
|                 '_id': basket.pk, |         } | ||||||
|                 'products': [ |  | ||||||
|                     { |  | ||||||
|                         '_id': product1.pk, |  | ||||||
|                         'name': product1.name |  | ||||||
|                     } |  | ||||||
|                 ] |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
|         # push to list |         # push to list | ||||||
|         basket.update(push__products=product2) |         basket.update(push__products=product2) | ||||||
|         basket.reload() |         basket.reload() | ||||||
|         self.assertEqual( |         assert Basket.objects._collection.find_one() == { | ||||||
|             Basket.objects._collection.find_one(), |             "_id": basket.pk, | ||||||
|             { |             "products": [ | ||||||
|                 '_id': basket.pk, |                 {"_id": product1.pk, "name": product1.name}, | ||||||
|                 'products': [ |                 {"_id": product2.pk, "name": product2.name}, | ||||||
|                     { |             ], | ||||||
|                         '_id': product1.pk, |         } | ||||||
|                         'name': product1.name |  | ||||||
|                     }, |  | ||||||
|                     { |  | ||||||
|                         '_id': product2.pk, |  | ||||||
|                         'name': product2.name |  | ||||||
|                     } |  | ||||||
|                 ] |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_cached_reference_field_update_all(self): |     def test_cached_reference_field_update_all(self): | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             TYPES = ( |             TYPES = (("pf", "PF"), ("pj", "PJ")) | ||||||
|                 ('pf', "PF"), |  | ||||||
|                 ('pj', "PJ") |  | ||||||
|             ) |  | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             tp = StringField(choices=TYPES) |             tp = StringField(choices=TYPES) | ||||||
|             father = CachedReferenceField('self', fields=('tp',)) |             father = CachedReferenceField("self", fields=("tp",)) | ||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         a1 = Person(name="Wilson Father", tp="pj") |         a1 = Person(name="Wilson Father", tp="pj") | ||||||
|         a1.save() |         a1.save() | ||||||
|  |  | ||||||
|         a2 = Person(name='Wilson Junior', tp='pf', father=a1) |         a2 = Person(name="Wilson Junior", tp="pf", father=a1) | ||||||
|         a2.save() |         a2.save() | ||||||
|  |  | ||||||
|         a2 = Person.objects.with_id(a2.id) |         a2 = Person.objects.with_id(a2.id) | ||||||
|         self.assertEqual(a2.father.tp, a1.tp) |         assert a2.father.tp == a1.tp | ||||||
|  |  | ||||||
|         self.assertEqual(dict(a2.to_mongo()), { |         assert dict(a2.to_mongo()) == { | ||||||
|             "_id": a2.pk, |             "_id": a2.pk, | ||||||
|             "name": u"Wilson Junior", |             "name": u"Wilson Junior", | ||||||
|             "tp": u"pf", |             "tp": u"pf", | ||||||
|             "father": { |             "father": {"_id": a1.pk, "tp": u"pj"}, | ||||||
|                 "_id": a1.pk, |         } | ||||||
|                 "tp": u"pj" |  | ||||||
|             } |  | ||||||
|         }) |  | ||||||
|  |  | ||||||
|         self.assertEqual(Person.objects(father=a1)._query, { |         assert Person.objects(father=a1)._query == {"father._id": a1.pk} | ||||||
|             'father._id': a1.pk |         assert Person.objects(father=a1).count() == 1 | ||||||
|         }) |  | ||||||
|         self.assertEqual(Person.objects(father=a1).count(), 1) |  | ||||||
|  |  | ||||||
|         Person.objects.update(set__tp="pf") |         Person.objects.update(set__tp="pf") | ||||||
|         Person.father.sync_all() |         Person.father.sync_all() | ||||||
|  |  | ||||||
|         a2.reload() |         a2.reload() | ||||||
|         self.assertEqual(dict(a2.to_mongo()), { |         assert dict(a2.to_mongo()) == { | ||||||
|             "_id": a2.pk, |             "_id": a2.pk, | ||||||
|             "name": u"Wilson Junior", |             "name": u"Wilson Junior", | ||||||
|             "tp": u"pf", |             "tp": u"pf", | ||||||
|             "father": { |             "father": {"_id": a1.pk, "tp": u"pf"}, | ||||||
|                 "_id": a1.pk, |         } | ||||||
|                 "tp": u"pf" |  | ||||||
|             } |  | ||||||
|         }) |  | ||||||
|  |  | ||||||
|     def test_cached_reference_fields_on_embedded_documents(self): |     def test_cached_reference_fields_on_embedded_documents(self): | ||||||
|         with self.assertRaises(InvalidDocumentError): |         with pytest.raises(InvalidDocumentError): | ||||||
|  |  | ||||||
|             class Test(Document): |             class Test(Document): | ||||||
|                 name = StringField() |                 name = StringField() | ||||||
|  |  | ||||||
|             type('WrongEmbeddedDocument', ( |             type( | ||||||
|                 EmbeddedDocument,), { |                 "WrongEmbeddedDocument", | ||||||
|                     'test': CachedReferenceField(Test) |                 (EmbeddedDocument,), | ||||||
|             }) |                 {"test": CachedReferenceField(Test)}, | ||||||
|  |             ) | ||||||
|  |  | ||||||
|     def test_cached_reference_auto_sync(self): |     def test_cached_reference_auto_sync(self): | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             TYPES = ( |             TYPES = (("pf", "PF"), ("pj", "PJ")) | ||||||
|                 ('pf', "PF"), |  | ||||||
|                 ('pj', "PJ") |  | ||||||
|             ) |  | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             tp = StringField( |             tp = StringField(choices=TYPES) | ||||||
|                 choices=TYPES |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|             father = CachedReferenceField('self', fields=('tp',)) |             father = CachedReferenceField("self", fields=("tp",)) | ||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         a1 = Person(name="Wilson Father", tp="pj") |         a1 = Person(name="Wilson Father", tp="pj") | ||||||
|         a1.save() |         a1.save() | ||||||
|  |  | ||||||
|         a2 = Person(name='Wilson Junior', tp='pf', father=a1) |         a2 = Person(name="Wilson Junior", tp="pf", father=a1) | ||||||
|         a2.save() |         a2.save() | ||||||
|  |  | ||||||
|         a1.tp = 'pf' |         a1.tp = "pf" | ||||||
|         a1.save() |         a1.save() | ||||||
|  |  | ||||||
|         a2.reload() |         a2.reload() | ||||||
|         self.assertEqual(dict(a2.to_mongo()), { |         assert dict(a2.to_mongo()) == { | ||||||
|             '_id': a2.pk, |             "_id": a2.pk, | ||||||
|             'name': 'Wilson Junior', |             "name": "Wilson Junior", | ||||||
|             'tp': 'pf', |             "tp": "pf", | ||||||
|             'father': { |             "father": {"_id": a1.pk, "tp": "pf"}, | ||||||
|                 '_id': a1.pk, |         } | ||||||
|                 'tp': 'pf' |  | ||||||
|             } |  | ||||||
|         }) |  | ||||||
|  |  | ||||||
|     def test_cached_reference_auto_sync_disabled(self): |     def test_cached_reference_auto_sync_disabled(self): | ||||||
|         class Persone(Document): |         class Persone(Document): | ||||||
|             TYPES = ( |             TYPES = (("pf", "PF"), ("pj", "PJ")) | ||||||
|                 ('pf', "PF"), |  | ||||||
|                 ('pj', "PJ") |  | ||||||
|             ) |  | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             tp = StringField( |             tp = StringField(choices=TYPES) | ||||||
|                 choices=TYPES |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|             father = CachedReferenceField( |             father = CachedReferenceField("self", fields=("tp",), auto_sync=False) | ||||||
|                 'self', fields=('tp',), auto_sync=False) |  | ||||||
|  |  | ||||||
|         Persone.drop_collection() |         Persone.drop_collection() | ||||||
|  |  | ||||||
|         a1 = Persone(name="Wilson Father", tp="pj") |         a1 = Persone(name="Wilson Father", tp="pj") | ||||||
|         a1.save() |         a1.save() | ||||||
|  |  | ||||||
|         a2 = Persone(name='Wilson Junior', tp='pf', father=a1) |         a2 = Persone(name="Wilson Junior", tp="pf", father=a1) | ||||||
|         a2.save() |         a2.save() | ||||||
|  |  | ||||||
|         a1.tp = 'pf' |         a1.tp = "pf" | ||||||
|         a1.save() |         a1.save() | ||||||
|  |  | ||||||
|         self.assertEqual(Persone.objects._collection.find_one({'_id': a2.pk}), { |         assert Persone.objects._collection.find_one({"_id": a2.pk}) == { | ||||||
|             '_id': a2.pk, |             "_id": a2.pk, | ||||||
|             'name': 'Wilson Junior', |             "name": "Wilson Junior", | ||||||
|             'tp': 'pf', |             "tp": "pf", | ||||||
|             'father': { |             "father": {"_id": a1.pk, "tp": "pj"}, | ||||||
|                 '_id': a1.pk, |         } | ||||||
|                 'tp': 'pj' |  | ||||||
|             } |  | ||||||
|         }) |  | ||||||
|  |  | ||||||
|     def test_cached_reference_embedded_fields(self): |     def test_cached_reference_embedded_fields(self): | ||||||
|         class Owner(EmbeddedDocument): |         class Owner(EmbeddedDocument): | ||||||
|             TPS = ( |             TPS = (("n", "Normal"), ("u", "Urgent")) | ||||||
|                 ('n', "Normal"), |  | ||||||
|                 ('u', "Urgent") |  | ||||||
|             ) |  | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             tp = StringField( |             tp = StringField(verbose_name="Type", db_field="t", choices=TPS) | ||||||
|                 verbose_name="Type", |  | ||||||
|                 db_field="t", |  | ||||||
|                 choices=TPS) |  | ||||||
|  |  | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
| @@ -351,45 +289,41 @@ class TestCachedReferenceField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         class Ocorrence(Document): |         class Ocorrence(Document): | ||||||
|             person = StringField() |             person = StringField() | ||||||
|             animal = CachedReferenceField( |             animal = CachedReferenceField(Animal, fields=["tag", "owner.tp"]) | ||||||
|                 Animal, fields=['tag', 'owner.tp']) |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         Ocorrence.drop_collection() |         Ocorrence.drop_collection() | ||||||
|  |  | ||||||
|         a = Animal(name="Leopard", tag="heavy", |         a = Animal( | ||||||
|                    owner=Owner(tp='u', name="Wilson Júnior") |             name="Leopard", tag="heavy", owner=Owner(tp="u", name="Wilson Júnior") | ||||||
|                    ) |         ) | ||||||
|         a.save() |         a.save() | ||||||
|  |  | ||||||
|         o = Ocorrence(person="teste", animal=a) |         o = Ocorrence(person="teste", animal=a) | ||||||
|         o.save() |         o.save() | ||||||
|         self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tp'])), { |         assert dict(a.to_mongo(fields=["tag", "owner.tp"])) == { | ||||||
|             '_id': a.pk, |             "_id": a.pk, | ||||||
|             'tag': 'heavy', |             "tag": "heavy", | ||||||
|             'owner': { |             "owner": {"t": "u"}, | ||||||
|                 't': 'u' |         } | ||||||
|             } |         assert o.to_mongo()["animal"]["tag"] == "heavy" | ||||||
|         }) |         assert o.to_mongo()["animal"]["owner"]["t"] == "u" | ||||||
|         self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') |  | ||||||
|         self.assertEqual(o.to_mongo()['animal']['owner']['t'], 'u') |  | ||||||
|  |  | ||||||
|         # Check to_mongo with fields |         # Check to_mongo with fields | ||||||
|         self.assertNotIn('animal', o.to_mongo(fields=['person'])) |         assert "animal" not in o.to_mongo(fields=["person"]) | ||||||
|  |  | ||||||
|         # counts |         # counts | ||||||
|         Ocorrence(person="teste 2").save() |         Ocorrence(person="teste 2").save() | ||||||
|         Ocorrence(person="teste 3").save() |         Ocorrence(person="teste 3").save() | ||||||
|  |  | ||||||
|         count = Ocorrence.objects( |         count = Ocorrence.objects(animal__tag="heavy", animal__owner__tp="u").count() | ||||||
|             animal__tag='heavy', animal__owner__tp='u').count() |         assert count == 1 | ||||||
|         self.assertEqual(count, 1) |  | ||||||
|  |  | ||||||
|         ocorrence = Ocorrence.objects( |         ocorrence = Ocorrence.objects( | ||||||
|             animal__tag='heavy', |             animal__tag="heavy", animal__owner__tp="u" | ||||||
|             animal__owner__tp='u').first() |         ).first() | ||||||
|         self.assertEqual(ocorrence.person, "teste") |         assert ocorrence.person == "teste" | ||||||
|         self.assertIsInstance(ocorrence.animal, Animal) |         assert isinstance(ocorrence.animal, Animal) | ||||||
|  |  | ||||||
|     def test_cached_reference_embedded_list_fields(self): |     def test_cached_reference_embedded_list_fields(self): | ||||||
|         class Owner(EmbeddedDocument): |         class Owner(EmbeddedDocument): | ||||||
| @@ -404,43 +338,40 @@ class TestCachedReferenceField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         class Ocorrence(Document): |         class Ocorrence(Document): | ||||||
|             person = StringField() |             person = StringField() | ||||||
|             animal = CachedReferenceField( |             animal = CachedReferenceField(Animal, fields=["tag", "owner.tags"]) | ||||||
|                 Animal, fields=['tag', 'owner.tags']) |  | ||||||
|  |  | ||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         Ocorrence.drop_collection() |         Ocorrence.drop_collection() | ||||||
|  |  | ||||||
|         a = Animal(name="Leopard", tag="heavy", |         a = Animal( | ||||||
|                    owner=Owner(tags=['cool', 'funny'], |             name="Leopard", | ||||||
|                                name="Wilson Júnior") |             tag="heavy", | ||||||
|                    ) |             owner=Owner(tags=["cool", "funny"], name="Wilson Júnior"), | ||||||
|  |         ) | ||||||
|         a.save() |         a.save() | ||||||
|  |  | ||||||
|         o = Ocorrence(person="teste 2", animal=a) |         o = Ocorrence(person="teste 2", animal=a) | ||||||
|         o.save() |         o.save() | ||||||
|         self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tags'])), { |         assert dict(a.to_mongo(fields=["tag", "owner.tags"])) == { | ||||||
|             '_id': a.pk, |             "_id": a.pk, | ||||||
|             'tag': 'heavy', |             "tag": "heavy", | ||||||
|             'owner': { |             "owner": {"tags": ["cool", "funny"]}, | ||||||
|                 'tags': ['cool', 'funny'] |         } | ||||||
|             } |  | ||||||
|         }) |  | ||||||
|  |  | ||||||
|         self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') |         assert o.to_mongo()["animal"]["tag"] == "heavy" | ||||||
|         self.assertEqual(o.to_mongo()['animal']['owner']['tags'], |         assert o.to_mongo()["animal"]["owner"]["tags"] == ["cool", "funny"] | ||||||
|                          ['cool', 'funny']) |  | ||||||
|  |  | ||||||
|         # counts |         # counts | ||||||
|         Ocorrence(person="teste 2").save() |         Ocorrence(person="teste 2").save() | ||||||
|         Ocorrence(person="teste 3").save() |         Ocorrence(person="teste 3").save() | ||||||
|  |  | ||||||
|         query = Ocorrence.objects( |         query = Ocorrence.objects( | ||||||
|             animal__tag='heavy', animal__owner__tags='cool')._query |             animal__tag="heavy", animal__owner__tags="cool" | ||||||
|         self.assertEqual( |         )._query | ||||||
|             query, {'animal.owner.tags': 'cool', 'animal.tag': 'heavy'}) |         assert query == {"animal.owner.tags": "cool", "animal.tag": "heavy"} | ||||||
|  |  | ||||||
|         ocorrence = Ocorrence.objects( |         ocorrence = Ocorrence.objects( | ||||||
|             animal__tag='heavy', |             animal__tag="heavy", animal__owner__tags="cool" | ||||||
|             animal__owner__tags='cool').first() |         ).first() | ||||||
|         self.assertEqual(ocorrence.person, "teste 2") |         assert ocorrence.person == "teste 2" | ||||||
|         self.assertIsInstance(ocorrence.animal, Animal) |         assert isinstance(ocorrence.animal, Animal) | ||||||
|   | |||||||
| @@ -1,9 +1,11 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| import datetime | import datetime | ||||||
| import math |  | ||||||
| import itertools | import itertools | ||||||
|  | import math | ||||||
| import re | import re | ||||||
|  |  | ||||||
|  | import pytest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase | from tests.utils import MongoDBTestCase | ||||||
| @@ -14,9 +16,10 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): | |||||||
|         """Tests for complex datetime fields - which can handle |         """Tests for complex datetime fields - which can handle | ||||||
|         microseconds without rounding. |         microseconds without rounding. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class LogEntry(Document): |         class LogEntry(Document): | ||||||
|             date = ComplexDateTimeField() |             date = ComplexDateTimeField() | ||||||
|             date_with_dots = ComplexDateTimeField(separator='.') |             date_with_dots = ComplexDateTimeField(separator=".") | ||||||
|  |  | ||||||
|         LogEntry.drop_collection() |         LogEntry.drop_collection() | ||||||
|  |  | ||||||
| @@ -27,7 +30,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): | |||||||
|         log.date = d1 |         log.date = d1 | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertEqual(log.date, d1) |         assert log.date == d1 | ||||||
|  |  | ||||||
|         # Post UTC - microseconds are rounded (down) nearest millisecond - with |         # Post UTC - microseconds are rounded (down) nearest millisecond - with | ||||||
|         # default datetimefields |         # default datetimefields | ||||||
| @@ -35,7 +38,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): | |||||||
|         log.date = d1 |         log.date = d1 | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertEqual(log.date, d1) |         assert log.date == d1 | ||||||
|  |  | ||||||
|         # Pre UTC dates microseconds below 1000 are dropped - with default |         # Pre UTC dates microseconds below 1000 are dropped - with default | ||||||
|         # datetimefields |         # datetimefields | ||||||
| @@ -43,7 +46,7 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): | |||||||
|         log.date = d1 |         log.date = d1 | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertEqual(log.date, d1) |         assert log.date == d1 | ||||||
|  |  | ||||||
|         # Pre UTC microseconds above 1000 is wonky - with default datetimefields |         # Pre UTC microseconds above 1000 is wonky - with default datetimefields | ||||||
|         # log.date has an invalid microsecond value so I can't construct |         # log.date has an invalid microsecond value so I can't construct | ||||||
| @@ -53,26 +56,34 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): | |||||||
|             log.date = d1 |             log.date = d1 | ||||||
|             log.save() |             log.save() | ||||||
|             log.reload() |             log.reload() | ||||||
|             self.assertEqual(log.date, d1) |             assert log.date == d1 | ||||||
|             log1 = LogEntry.objects.get(date=d1) |             log1 = LogEntry.objects.get(date=d1) | ||||||
|             self.assertEqual(log, log1) |             assert log == log1 | ||||||
|  |  | ||||||
|         # Test string padding |         # Test string padding | ||||||
|         microsecond = map(int, [math.pow(10, x) for x in range(6)]) |         microsecond = map(int, [math.pow(10, x) for x in range(6)]) | ||||||
|         mm = dd = hh = ii = ss = [1, 10] |         mm = dd = hh = ii = ss = [1, 10] | ||||||
|  |  | ||||||
|         for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond): |         for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond): | ||||||
|             stored = LogEntry(date=datetime.datetime(*values)).to_mongo()['date'] |             stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"] | ||||||
|             self.assertTrue(re.match('^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$', stored) is not None) |             assert ( | ||||||
|  |                 re.match(r"^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored) | ||||||
|  |                 is not None | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         # Test separator |         # Test separator | ||||||
|         stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()['date_with_dots'] |         stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()[ | ||||||
|         self.assertTrue(re.match('^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$', stored) is not None) |             "date_with_dots" | ||||||
|  |         ] | ||||||
|  |         assert ( | ||||||
|  |             re.match(r"^\d{4}.\d{2}.\d{2}.\d{2}.\d{2}.\d{2}.\d{6}$", stored) is not None | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     def test_complexdatetime_usage(self): |     def test_complexdatetime_usage(self): | ||||||
|         """Tests for complex datetime fields - which can handle |         """Tests for complex datetime fields - which can handle | ||||||
|         microseconds without rounding. |         microseconds without rounding. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class LogEntry(Document): |         class LogEntry(Document): | ||||||
|             date = ComplexDateTimeField() |             date = ComplexDateTimeField() | ||||||
|  |  | ||||||
| @@ -84,62 +95,61 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): | |||||||
|         log.save() |         log.save() | ||||||
|  |  | ||||||
|         log1 = LogEntry.objects.get(date=d1) |         log1 = LogEntry.objects.get(date=d1) | ||||||
|         self.assertEqual(log, log1) |         assert log == log1 | ||||||
|  |  | ||||||
|         # create extra 59 log entries for a total of 60 |         # create extra 59 log entries for a total of 60 | ||||||
|         for i in range(1951, 2010): |         for i in range(1951, 2010): | ||||||
|             d = datetime.datetime(i, 1, 1, 0, 0, 1, 999) |             d = datetime.datetime(i, 1, 1, 0, 0, 1, 999) | ||||||
|             LogEntry(date=d).save() |             LogEntry(date=d).save() | ||||||
|  |  | ||||||
|         self.assertEqual(LogEntry.objects.count(), 60) |         assert LogEntry.objects.count() == 60 | ||||||
|  |  | ||||||
|         # Test ordering |         # Test ordering | ||||||
|         logs = LogEntry.objects.order_by("date") |         logs = LogEntry.objects.order_by("date") | ||||||
|         i = 0 |         i = 0 | ||||||
|         while i < 59: |         while i < 59: | ||||||
|             self.assertTrue(logs[i].date <= logs[i + 1].date) |             assert logs[i].date <= logs[i + 1].date | ||||||
|             i += 1 |             i += 1 | ||||||
|  |  | ||||||
|         logs = LogEntry.objects.order_by("-date") |         logs = LogEntry.objects.order_by("-date") | ||||||
|         i = 0 |         i = 0 | ||||||
|         while i < 59: |         while i < 59: | ||||||
|             self.assertTrue(logs[i].date >= logs[i + 1].date) |             assert logs[i].date >= logs[i + 1].date | ||||||
|             i += 1 |             i += 1 | ||||||
|  |  | ||||||
|         # Test searching |         # Test searching | ||||||
|         logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) |         logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) | ||||||
|         self.assertEqual(logs.count(), 30) |         assert logs.count() == 30 | ||||||
|  |  | ||||||
|         logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1)) |         logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1)) | ||||||
|         self.assertEqual(logs.count(), 30) |         assert logs.count() == 30 | ||||||
|  |  | ||||||
|         logs = LogEntry.objects.filter( |         logs = LogEntry.objects.filter( | ||||||
|             date__lte=datetime.datetime(2011, 1, 1), |             date__lte=datetime.datetime(2011, 1, 1), | ||||||
|             date__gte=datetime.datetime(2000, 1, 1), |             date__gte=datetime.datetime(2000, 1, 1), | ||||||
|         ) |         ) | ||||||
|         self.assertEqual(logs.count(), 10) |         assert logs.count() == 10 | ||||||
|  |  | ||||||
|         LogEntry.drop_collection() |         LogEntry.drop_collection() | ||||||
|  |  | ||||||
|         # Test microsecond-level ordering/filtering |         # Test microsecond-level ordering/filtering | ||||||
|         for microsecond in (99, 999, 9999, 10000): |         for microsecond in (99, 999, 9999, 10000): | ||||||
|             LogEntry( |             LogEntry(date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)).save() | ||||||
|                 date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond) |  | ||||||
|             ).save() |  | ||||||
|  |  | ||||||
|         logs = list(LogEntry.objects.order_by('date')) |         logs = list(LogEntry.objects.order_by("date")) | ||||||
|         for next_idx, log in enumerate(logs[:-1], start=1): |         for next_idx, log in enumerate(logs[:-1], start=1): | ||||||
|             next_log = logs[next_idx] |             next_log = logs[next_idx] | ||||||
|             self.assertTrue(log.date < next_log.date) |             assert log.date < next_log.date | ||||||
|  |  | ||||||
|         logs = list(LogEntry.objects.order_by('-date')) |         logs = list(LogEntry.objects.order_by("-date")) | ||||||
|         for next_idx, log in enumerate(logs[:-1], start=1): |         for next_idx, log in enumerate(logs[:-1], start=1): | ||||||
|             next_log = logs[next_idx] |             next_log = logs[next_idx] | ||||||
|             self.assertTrue(log.date > next_log.date) |             assert log.date > next_log.date | ||||||
|  |  | ||||||
|         logs = LogEntry.objects.filter( |         logs = LogEntry.objects.filter( | ||||||
|             date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000)) |             date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000) | ||||||
|         self.assertEqual(logs.count(), 4) |         ) | ||||||
|  |         assert logs.count() == 4 | ||||||
|  |  | ||||||
|     def test_no_default_value(self): |     def test_no_default_value(self): | ||||||
|         class Log(Document): |         class Log(Document): | ||||||
| @@ -148,25 +158,26 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): | |||||||
|         Log.drop_collection() |         Log.drop_collection() | ||||||
|  |  | ||||||
|         log = Log() |         log = Log() | ||||||
|         self.assertIsNone(log.timestamp) |         assert log.timestamp is None | ||||||
|         log.save() |         log.save() | ||||||
|  |  | ||||||
|         fetched_log = Log.objects.with_id(log.id) |         fetched_log = Log.objects.with_id(log.id) | ||||||
|         self.assertIsNone(fetched_log.timestamp) |         assert fetched_log.timestamp is None | ||||||
|  |  | ||||||
|     def test_default_static_value(self): |     def test_default_static_value(self): | ||||||
|         NOW = datetime.datetime.utcnow() |         NOW = datetime.datetime.utcnow() | ||||||
|  |  | ||||||
|         class Log(Document): |         class Log(Document): | ||||||
|             timestamp = ComplexDateTimeField(default=NOW) |             timestamp = ComplexDateTimeField(default=NOW) | ||||||
|  |  | ||||||
|         Log.drop_collection() |         Log.drop_collection() | ||||||
|  |  | ||||||
|         log = Log() |         log = Log() | ||||||
|         self.assertEqual(log.timestamp, NOW) |         assert log.timestamp == NOW | ||||||
|         log.save() |         log.save() | ||||||
|  |  | ||||||
|         fetched_log = Log.objects.with_id(log.id) |         fetched_log = Log.objects.with_id(log.id) | ||||||
|         self.assertEqual(fetched_log.timestamp, NOW) |         assert fetched_log.timestamp == NOW | ||||||
|  |  | ||||||
|     def test_default_callable(self): |     def test_default_callable(self): | ||||||
|         NOW = datetime.datetime.utcnow() |         NOW = datetime.datetime.utcnow() | ||||||
| @@ -177,8 +188,23 @@ class ComplexDateTimeFieldTest(MongoDBTestCase): | |||||||
|         Log.drop_collection() |         Log.drop_collection() | ||||||
|  |  | ||||||
|         log = Log() |         log = Log() | ||||||
|         self.assertGreaterEqual(log.timestamp, NOW) |         assert log.timestamp >= NOW | ||||||
|         log.save() |         log.save() | ||||||
|  |  | ||||||
|         fetched_log = Log.objects.with_id(log.id) |         fetched_log = Log.objects.with_id(log.id) | ||||||
|         self.assertGreaterEqual(fetched_log.timestamp, NOW) |         assert fetched_log.timestamp >= NOW | ||||||
|  |  | ||||||
|  |     def test_setting_bad_value_does_not_raise_unless_validate_is_called(self): | ||||||
|  |         # test regression of #2253 | ||||||
|  |  | ||||||
|  |         class Log(Document): | ||||||
|  |             timestamp = ComplexDateTimeField() | ||||||
|  |  | ||||||
|  |         Log.drop_collection() | ||||||
|  |  | ||||||
|  |         log = Log(timestamp="garbage") | ||||||
|  |         with pytest.raises(ValidationError): | ||||||
|  |             log.validate() | ||||||
|  |  | ||||||
|  |         with pytest.raises(ValidationError): | ||||||
|  |             log.save() | ||||||
|   | |||||||
| @@ -1,6 +1,7 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| import datetime | import datetime | ||||||
| import six |  | ||||||
|  | import pytest | ||||||
|  |  | ||||||
| try: | try: | ||||||
|     import dateutil |     import dateutil | ||||||
| @@ -8,7 +9,6 @@ except ImportError: | |||||||
|     dateutil = None |     dateutil = None | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -18,41 +18,47 @@ class TestDateField(MongoDBTestCase): | |||||||
|         Ensure an exception is raised when trying to |         Ensure an exception is raised when trying to | ||||||
|         cast an empty string to datetime. |         cast an empty string to datetime. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class MyDoc(Document): |         class MyDoc(Document): | ||||||
|             dt = DateField() |             dt = DateField() | ||||||
|  |  | ||||||
|         md = MyDoc(dt='') |         md = MyDoc(dt="") | ||||||
|         self.assertRaises(ValidationError, md.save) |         with pytest.raises(ValidationError): | ||||||
|  |             md.save() | ||||||
|  |  | ||||||
|     def test_date_from_whitespace_string(self): |     def test_date_from_whitespace_string(self): | ||||||
|         """ |         """ | ||||||
|         Ensure an exception is raised when trying to |         Ensure an exception is raised when trying to | ||||||
|         cast a whitespace-only string to datetime. |         cast a whitespace-only string to datetime. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class MyDoc(Document): |         class MyDoc(Document): | ||||||
|             dt = DateField() |             dt = DateField() | ||||||
|  |  | ||||||
|         md = MyDoc(dt='   ') |         md = MyDoc(dt="   ") | ||||||
|         self.assertRaises(ValidationError, md.save) |         with pytest.raises(ValidationError): | ||||||
|  |             md.save() | ||||||
|  |  | ||||||
|     def test_default_values_today(self): |     def test_default_values_today(self): | ||||||
|         """Ensure that default field values are used when creating |         """Ensure that default field values are used when creating | ||||||
|         a document. |         a document. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             day = DateField(default=datetime.date.today) |             day = DateField(default=datetime.date.today) | ||||||
|  |  | ||||||
|         person = Person() |         person = Person() | ||||||
|         person.validate() |         person.validate() | ||||||
|         self.assertEqual(person.day, person.day) |         assert person.day == person.day | ||||||
|         self.assertEqual(person.day, datetime.date.today()) |         assert person.day == datetime.date.today() | ||||||
|         self.assertEqual(person._data['day'], person.day) |         assert person._data["day"] == person.day | ||||||
|  |  | ||||||
|     def test_date(self): |     def test_date(self): | ||||||
|         """Tests showing pymongo date fields |         """Tests showing pymongo date fields | ||||||
|  |  | ||||||
|         See: http://api.mongodb.org/python/current/api/bson/son.html#dt |         See: http://api.mongodb.org/python/current/api/bson/son.html#dt | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class LogEntry(Document): |         class LogEntry(Document): | ||||||
|             date = DateField() |             date = DateField() | ||||||
|  |  | ||||||
| @@ -63,7 +69,7 @@ class TestDateField(MongoDBTestCase): | |||||||
|         log.date = datetime.date.today() |         log.date = datetime.date.today() | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertEqual(log.date, datetime.date.today()) |         assert log.date == datetime.date.today() | ||||||
|  |  | ||||||
|         d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) |         d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999) | ||||||
|         d2 = datetime.datetime(1970, 1, 1, 0, 0, 1) |         d2 = datetime.datetime(1970, 1, 1, 0, 0, 1) | ||||||
| @@ -71,30 +77,20 @@ class TestDateField(MongoDBTestCase): | |||||||
|         log.date = d1 |         log.date = d1 | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertEqual(log.date, d1.date()) |         assert log.date == d1.date() | ||||||
|         self.assertEqual(log.date, d2.date()) |         assert log.date == d2.date() | ||||||
|  |  | ||||||
|         d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999) |         d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999) | ||||||
|         d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000) |         d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000) | ||||||
|         log.date = d1 |         log.date = d1 | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertEqual(log.date, d1.date()) |         assert log.date == d1.date() | ||||||
|         self.assertEqual(log.date, d2.date()) |         assert log.date == d2.date() | ||||||
|  |  | ||||||
|         if not six.PY3: |  | ||||||
|             # Pre UTC dates microseconds below 1000 are dropped |  | ||||||
|             # This does not seem to be true in PY3 |  | ||||||
|             d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) |  | ||||||
|             d2 = datetime.datetime(1969, 12, 31, 23, 59, 59) |  | ||||||
|             log.date = d1 |  | ||||||
|             log.save() |  | ||||||
|             log.reload() |  | ||||||
|             self.assertEqual(log.date, d1.date()) |  | ||||||
|             self.assertEqual(log.date, d2.date()) |  | ||||||
|  |  | ||||||
|     def test_regular_usage(self): |     def test_regular_usage(self): | ||||||
|         """Tests for regular datetime fields""" |         """Tests for regular datetime fields""" | ||||||
|  |  | ||||||
|         class LogEntry(Document): |         class LogEntry(Document): | ||||||
|             date = DateField() |             date = DateField() | ||||||
|  |  | ||||||
| @@ -106,42 +102,43 @@ class TestDateField(MongoDBTestCase): | |||||||
|         log.validate() |         log.validate() | ||||||
|         log.save() |         log.save() | ||||||
|  |  | ||||||
|         for query in (d1, d1.isoformat(' ')): |         for query in (d1, d1.isoformat(" ")): | ||||||
|             log1 = LogEntry.objects.get(date=query) |             log1 = LogEntry.objects.get(date=query) | ||||||
|             self.assertEqual(log, log1) |             assert log == log1 | ||||||
|  |  | ||||||
|         if dateutil: |         if dateutil: | ||||||
|             log1 = LogEntry.objects.get(date=d1.isoformat('T')) |             log1 = LogEntry.objects.get(date=d1.isoformat("T")) | ||||||
|             self.assertEqual(log, log1) |             assert log == log1 | ||||||
|  |  | ||||||
|         # create additional 19 log entries for a total of 20 |         # create additional 19 log entries for a total of 20 | ||||||
|         for i in range(1971, 1990): |         for i in range(1971, 1990): | ||||||
|             d = datetime.datetime(i, 1, 1, 0, 0, 1) |             d = datetime.datetime(i, 1, 1, 0, 0, 1) | ||||||
|             LogEntry(date=d).save() |             LogEntry(date=d).save() | ||||||
|  |  | ||||||
|         self.assertEqual(LogEntry.objects.count(), 20) |         assert LogEntry.objects.count() == 20 | ||||||
|  |  | ||||||
|         # Test ordering |         # Test ordering | ||||||
|         logs = LogEntry.objects.order_by("date") |         logs = LogEntry.objects.order_by("date") | ||||||
|         i = 0 |         i = 0 | ||||||
|         while i < 19: |         while i < 19: | ||||||
|             self.assertTrue(logs[i].date <= logs[i + 1].date) |             assert logs[i].date <= logs[i + 1].date | ||||||
|             i += 1 |             i += 1 | ||||||
|  |  | ||||||
|         logs = LogEntry.objects.order_by("-date") |         logs = LogEntry.objects.order_by("-date") | ||||||
|         i = 0 |         i = 0 | ||||||
|         while i < 19: |         while i < 19: | ||||||
|             self.assertTrue(logs[i].date >= logs[i + 1].date) |             assert logs[i].date >= logs[i + 1].date | ||||||
|             i += 1 |             i += 1 | ||||||
|  |  | ||||||
|         # Test searching |         # Test searching | ||||||
|         logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) |         logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) | ||||||
|         self.assertEqual(logs.count(), 10) |         assert logs.count() == 10 | ||||||
|  |  | ||||||
|     def test_validation(self): |     def test_validation(self): | ||||||
|         """Ensure that invalid values cannot be assigned to datetime |         """Ensure that invalid values cannot be assigned to datetime | ||||||
|         fields. |         fields. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class LogEntry(Document): |         class LogEntry(Document): | ||||||
|             time = DateField() |             time = DateField() | ||||||
|  |  | ||||||
| @@ -152,14 +149,16 @@ class TestDateField(MongoDBTestCase): | |||||||
|         log.time = datetime.date.today() |         log.time = datetime.date.today() | ||||||
|         log.validate() |         log.validate() | ||||||
|  |  | ||||||
|         log.time = datetime.datetime.now().isoformat(' ') |         log.time = datetime.datetime.now().isoformat(" ") | ||||||
|         log.validate() |         log.validate() | ||||||
|  |  | ||||||
|         if dateutil: |         if dateutil: | ||||||
|             log.time = datetime.datetime.now().isoformat('T') |             log.time = datetime.datetime.now().isoformat("T") | ||||||
|             log.validate() |             log.validate() | ||||||
|  |  | ||||||
|         log.time = -1 |         log.time = -1 | ||||||
|         self.assertRaises(ValidationError, log.validate) |         with pytest.raises(ValidationError): | ||||||
|         log.time = 'ABC' |             log.validate() | ||||||
|         self.assertRaises(ValidationError, log.validate) |         log.time = "ABC" | ||||||
|  |         with pytest.raises(ValidationError): | ||||||
|  |             log.validate() | ||||||
|   | |||||||
| @@ -1,6 +1,7 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| import datetime as dt | import datetime as dt | ||||||
| import six |  | ||||||
|  | import pytest | ||||||
|  |  | ||||||
| try: | try: | ||||||
|     import dateutil |     import dateutil | ||||||
| @@ -19,27 +20,32 @@ class TestDateTimeField(MongoDBTestCase): | |||||||
|         Ensure an exception is raised when trying to |         Ensure an exception is raised when trying to | ||||||
|         cast an empty string to datetime. |         cast an empty string to datetime. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class MyDoc(Document): |         class MyDoc(Document): | ||||||
|             dt = DateTimeField() |             dt = DateTimeField() | ||||||
|  |  | ||||||
|         md = MyDoc(dt='') |         md = MyDoc(dt="") | ||||||
|         self.assertRaises(ValidationError, md.save) |         with pytest.raises(ValidationError): | ||||||
|  |             md.save() | ||||||
|  |  | ||||||
|     def test_datetime_from_whitespace_string(self): |     def test_datetime_from_whitespace_string(self): | ||||||
|         """ |         """ | ||||||
|         Ensure an exception is raised when trying to |         Ensure an exception is raised when trying to | ||||||
|         cast a whitespace-only string to datetime. |         cast a whitespace-only string to datetime. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class MyDoc(Document): |         class MyDoc(Document): | ||||||
|             dt = DateTimeField() |             dt = DateTimeField() | ||||||
|  |  | ||||||
|         md = MyDoc(dt='   ') |         md = MyDoc(dt="   ") | ||||||
|         self.assertRaises(ValidationError, md.save) |         with pytest.raises(ValidationError): | ||||||
|  |             md.save() | ||||||
|  |  | ||||||
|     def test_default_value_utcnow(self): |     def test_default_value_utcnow(self): | ||||||
|         """Ensure that default field values are used when creating |         """Ensure that default field values are used when creating | ||||||
|         a document. |         a document. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             created = DateTimeField(default=dt.datetime.utcnow) |             created = DateTimeField(default=dt.datetime.utcnow) | ||||||
|  |  | ||||||
| @@ -47,9 +53,9 @@ class TestDateTimeField(MongoDBTestCase): | |||||||
|         person = Person() |         person = Person() | ||||||
|         person.validate() |         person.validate() | ||||||
|         person_created_t0 = person.created |         person_created_t0 = person.created | ||||||
|         self.assertLess(person.created - utcnow, dt.timedelta(seconds=1)) |         assert person.created - utcnow < dt.timedelta(seconds=1) | ||||||
|         self.assertEqual(person_created_t0, person.created)  # make sure it does not change |         assert person_created_t0 == person.created  # make sure it does not change | ||||||
|         self.assertEqual(person._data['created'], person.created) |         assert person._data["created"] == person.created | ||||||
|  |  | ||||||
|     def test_handling_microseconds(self): |     def test_handling_microseconds(self): | ||||||
|         """Tests showing pymongo datetime fields handling of microseconds. |         """Tests showing pymongo datetime fields handling of microseconds. | ||||||
| @@ -58,6 +64,7 @@ class TestDateTimeField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         See: http://api.mongodb.org/python/current/api/bson/son.html#dt |         See: http://api.mongodb.org/python/current/api/bson/son.html#dt | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class LogEntry(Document): |         class LogEntry(Document): | ||||||
|             date = DateTimeField() |             date = DateTimeField() | ||||||
|  |  | ||||||
| @@ -68,7 +75,7 @@ class TestDateTimeField(MongoDBTestCase): | |||||||
|         log.date = dt.date.today() |         log.date = dt.date.today() | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertEqual(log.date.date(), dt.date.today()) |         assert log.date.date() == dt.date.today() | ||||||
|  |  | ||||||
|         # Post UTC - microseconds are rounded (down) nearest millisecond and |         # Post UTC - microseconds are rounded (down) nearest millisecond and | ||||||
|         # dropped |         # dropped | ||||||
| @@ -78,8 +85,8 @@ class TestDateTimeField(MongoDBTestCase): | |||||||
|         log.date = d1 |         log.date = d1 | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertNotEqual(log.date, d1) |         assert log.date != d1 | ||||||
|         self.assertEqual(log.date, d2) |         assert log.date == d2 | ||||||
|  |  | ||||||
|         # Post UTC - microseconds are rounded (down) nearest millisecond |         # Post UTC - microseconds are rounded (down) nearest millisecond | ||||||
|         d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 9999) |         d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 9999) | ||||||
| @@ -87,22 +94,12 @@ class TestDateTimeField(MongoDBTestCase): | |||||||
|         log.date = d1 |         log.date = d1 | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertNotEqual(log.date, d1) |         assert log.date != d1 | ||||||
|         self.assertEqual(log.date, d2) |         assert log.date == d2 | ||||||
|  |  | ||||||
|         if not six.PY3: |  | ||||||
|             # Pre UTC dates microseconds below 1000 are dropped |  | ||||||
|             # This does not seem to be true in PY3 |  | ||||||
|             d1 = dt.datetime(1969, 12, 31, 23, 59, 59, 999) |  | ||||||
|             d2 = dt.datetime(1969, 12, 31, 23, 59, 59) |  | ||||||
|             log.date = d1 |  | ||||||
|             log.save() |  | ||||||
|             log.reload() |  | ||||||
|             self.assertNotEqual(log.date, d1) |  | ||||||
|             self.assertEqual(log.date, d2) |  | ||||||
|  |  | ||||||
|     def test_regular_usage(self): |     def test_regular_usage(self): | ||||||
|         """Tests for regular datetime fields""" |         """Tests for regular datetime fields""" | ||||||
|  |  | ||||||
|         class LogEntry(Document): |         class LogEntry(Document): | ||||||
|             date = DateTimeField() |             date = DateTimeField() | ||||||
|  |  | ||||||
| @@ -114,51 +111,51 @@ class TestDateTimeField(MongoDBTestCase): | |||||||
|         log.validate() |         log.validate() | ||||||
|         log.save() |         log.save() | ||||||
|  |  | ||||||
|         for query in (d1, d1.isoformat(' ')): |         for query in (d1, d1.isoformat(" ")): | ||||||
|             log1 = LogEntry.objects.get(date=query) |             log1 = LogEntry.objects.get(date=query) | ||||||
|             self.assertEqual(log, log1) |             assert log == log1 | ||||||
|  |  | ||||||
|         if dateutil: |         if dateutil: | ||||||
|             log1 = LogEntry.objects.get(date=d1.isoformat('T')) |             log1 = LogEntry.objects.get(date=d1.isoformat("T")) | ||||||
|             self.assertEqual(log, log1) |             assert log == log1 | ||||||
|  |  | ||||||
|         # create additional 19 log entries for a total of 20 |         # create additional 19 log entries for a total of 20 | ||||||
|         for i in range(1971, 1990): |         for i in range(1971, 1990): | ||||||
|             d = dt.datetime(i, 1, 1, 0, 0, 1) |             d = dt.datetime(i, 1, 1, 0, 0, 1) | ||||||
|             LogEntry(date=d).save() |             LogEntry(date=d).save() | ||||||
|  |  | ||||||
|         self.assertEqual(LogEntry.objects.count(), 20) |         assert LogEntry.objects.count() == 20 | ||||||
|  |  | ||||||
|         # Test ordering |         # Test ordering | ||||||
|         logs = LogEntry.objects.order_by("date") |         logs = LogEntry.objects.order_by("date") | ||||||
|         i = 0 |         i = 0 | ||||||
|         while i < 19: |         while i < 19: | ||||||
|             self.assertTrue(logs[i].date <= logs[i + 1].date) |             assert logs[i].date <= logs[i + 1].date | ||||||
|             i += 1 |             i += 1 | ||||||
|  |  | ||||||
|         logs = LogEntry.objects.order_by("-date") |         logs = LogEntry.objects.order_by("-date") | ||||||
|         i = 0 |         i = 0 | ||||||
|         while i < 19: |         while i < 19: | ||||||
|             self.assertTrue(logs[i].date >= logs[i + 1].date) |             assert logs[i].date >= logs[i + 1].date | ||||||
|             i += 1 |             i += 1 | ||||||
|  |  | ||||||
|         # Test searching |         # Test searching | ||||||
|         logs = LogEntry.objects.filter(date__gte=dt.datetime(1980, 1, 1)) |         logs = LogEntry.objects.filter(date__gte=dt.datetime(1980, 1, 1)) | ||||||
|         self.assertEqual(logs.count(), 10) |         assert logs.count() == 10 | ||||||
|  |  | ||||||
|         logs = LogEntry.objects.filter(date__lte=dt.datetime(1980, 1, 1)) |         logs = LogEntry.objects.filter(date__lte=dt.datetime(1980, 1, 1)) | ||||||
|         self.assertEqual(logs.count(), 10) |         assert logs.count() == 10 | ||||||
|  |  | ||||||
|         logs = LogEntry.objects.filter( |         logs = LogEntry.objects.filter( | ||||||
|             date__lte=dt.datetime(1980, 1, 1), |             date__lte=dt.datetime(1980, 1, 1), date__gte=dt.datetime(1975, 1, 1) | ||||||
|             date__gte=dt.datetime(1975, 1, 1), |  | ||||||
|         ) |         ) | ||||||
|         self.assertEqual(logs.count(), 5) |         assert logs.count() == 5 | ||||||
|  |  | ||||||
|     def test_datetime_validation(self): |     def test_datetime_validation(self): | ||||||
|         """Ensure that invalid values cannot be assigned to datetime |         """Ensure that invalid values cannot be assigned to datetime | ||||||
|         fields. |         fields. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class LogEntry(Document): |         class LogEntry(Document): | ||||||
|             time = DateTimeField() |             time = DateTimeField() | ||||||
|  |  | ||||||
| @@ -169,45 +166,51 @@ class TestDateTimeField(MongoDBTestCase): | |||||||
|         log.time = dt.date.today() |         log.time = dt.date.today() | ||||||
|         log.validate() |         log.validate() | ||||||
|  |  | ||||||
|         log.time = dt.datetime.now().isoformat(' ') |         log.time = dt.datetime.now().isoformat(" ") | ||||||
|         log.validate() |         log.validate() | ||||||
|  |  | ||||||
|         log.time = '2019-05-16 21:42:57.897847' |         log.time = "2019-05-16 21:42:57.897847" | ||||||
|         log.validate() |         log.validate() | ||||||
|  |  | ||||||
|         if dateutil: |         if dateutil: | ||||||
|             log.time = dt.datetime.now().isoformat('T') |             log.time = dt.datetime.now().isoformat("T") | ||||||
|             log.validate() |             log.validate() | ||||||
|  |  | ||||||
|         log.time = -1 |         log.time = -1 | ||||||
|         self.assertRaises(ValidationError, log.validate) |         with pytest.raises(ValidationError): | ||||||
|         log.time = 'ABC' |             log.validate() | ||||||
|         self.assertRaises(ValidationError, log.validate) |         log.time = "ABC" | ||||||
|         log.time = '2019-05-16 21:GARBAGE:12' |         with pytest.raises(ValidationError): | ||||||
|         self.assertRaises(ValidationError, log.validate) |             log.validate() | ||||||
|         log.time = '2019-05-16 21:42:57.GARBAGE' |         log.time = "2019-05-16 21:GARBAGE:12" | ||||||
|         self.assertRaises(ValidationError, log.validate) |         with pytest.raises(ValidationError): | ||||||
|         log.time = '2019-05-16 21:42:57.123.456' |             log.validate() | ||||||
|         self.assertRaises(ValidationError, log.validate) |         log.time = "2019-05-16 21:42:57.GARBAGE" | ||||||
|  |         with pytest.raises(ValidationError): | ||||||
|  |             log.validate() | ||||||
|  |         log.time = "2019-05-16 21:42:57.123.456" | ||||||
|  |         with pytest.raises(ValidationError): | ||||||
|  |             log.validate() | ||||||
|  |  | ||||||
|     def test_parse_datetime_as_str(self): |     def test_parse_datetime_as_str(self): | ||||||
|         class DTDoc(Document): |         class DTDoc(Document): | ||||||
|             date = DateTimeField() |             date = DateTimeField() | ||||||
|  |  | ||||||
|         date_str = '2019-03-02 22:26:01' |         date_str = "2019-03-02 22:26:01" | ||||||
|  |  | ||||||
|         # make sure that passing a parsable datetime works |         # make sure that passing a parsable datetime works | ||||||
|         dtd = DTDoc() |         dtd = DTDoc() | ||||||
|         dtd.date = date_str |         dtd.date = date_str | ||||||
|         self.assertIsInstance(dtd.date, six.string_types) |         assert isinstance(dtd.date, str) | ||||||
|         dtd.save() |         dtd.save() | ||||||
|         dtd.reload() |         dtd.reload() | ||||||
|  |  | ||||||
|         self.assertIsInstance(dtd.date, dt.datetime) |         assert isinstance(dtd.date, dt.datetime) | ||||||
|         self.assertEqual(str(dtd.date), date_str) |         assert str(dtd.date) == date_str | ||||||
|  |  | ||||||
|         dtd.date = 'January 1st, 9999999999' |         dtd.date = "January 1st, 9999999999" | ||||||
|         self.assertRaises(ValidationError, dtd.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             dtd.validate() | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestDateTimeTzAware(MongoDBTestCase): | class TestDateTimeTzAware(MongoDBTestCase): | ||||||
| @@ -217,7 +220,7 @@ class TestDateTimeTzAware(MongoDBTestCase): | |||||||
|         connection._connections = {} |         connection._connections = {} | ||||||
|         connection._dbs = {} |         connection._dbs = {} | ||||||
|  |  | ||||||
|         connect(db='mongoenginetest', tz_aware=True) |         connect(db="mongoenginetest", tz_aware=True) | ||||||
|  |  | ||||||
|         class LogEntry(Document): |         class LogEntry(Document): | ||||||
|             time = DateTimeField() |             time = DateTimeField() | ||||||
| @@ -228,4 +231,4 @@ class TestDateTimeTzAware(MongoDBTestCase): | |||||||
|  |  | ||||||
|         log = LogEntry.objects.first() |         log = LogEntry.objects.first() | ||||||
|         log.time = dt.datetime(2013, 1, 1, 0, 0, 0) |         log.time = dt.datetime(2013, 1, 1, 0, 0, 0) | ||||||
|         self.assertEqual(['time'], log._changed_fields) |         assert ["time"] == log._changed_fields | ||||||
|   | |||||||
| @@ -1,39 +1,44 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| from decimal import Decimal | from decimal import Decimal | ||||||
|  |  | ||||||
| from mongoengine import * | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
| from tests.utils import MongoDBTestCase | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestDecimalField(MongoDBTestCase): | class TestDecimalField(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def test_validation(self): |     def test_validation(self): | ||||||
|         """Ensure that invalid values cannot be assigned to decimal fields. |         """Ensure that invalid values cannot be assigned to decimal fields. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             height = DecimalField(min_value=Decimal('0.1'), |             height = DecimalField(min_value=Decimal("0.1"), max_value=Decimal("3.5")) | ||||||
|                                   max_value=Decimal('3.5')) |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         Person(height=Decimal('1.89')).save() |         Person(height=Decimal("1.89")).save() | ||||||
|         person = Person.objects.first() |         person = Person.objects.first() | ||||||
|         self.assertEqual(person.height, Decimal('1.89')) |         assert person.height == Decimal("1.89") | ||||||
|  |  | ||||||
|         person.height = '2.0' |         person.height = "2.0" | ||||||
|         person.save() |         person.save() | ||||||
|         person.height = 0.01 |         person.height = 0.01 | ||||||
|         self.assertRaises(ValidationError, person.validate) |         with pytest.raises(ValidationError): | ||||||
|         person.height = Decimal('0.01') |             person.validate() | ||||||
|         self.assertRaises(ValidationError, person.validate) |         person.height = Decimal("0.01") | ||||||
|         person.height = Decimal('4.0') |         with pytest.raises(ValidationError): | ||||||
|         self.assertRaises(ValidationError, person.validate) |             person.validate() | ||||||
|         person.height = 'something invalid' |         person.height = Decimal("4.0") | ||||||
|         self.assertRaises(ValidationError, person.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             person.validate() | ||||||
|  |         person.height = "something invalid" | ||||||
|  |         with pytest.raises(ValidationError): | ||||||
|  |             person.validate() | ||||||
|  |  | ||||||
|         person_2 = Person(height='something invalid') |         person_2 = Person(height="something invalid") | ||||||
|         self.assertRaises(ValidationError, person_2.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             person_2.validate() | ||||||
|  |  | ||||||
|     def test_comparison(self): |     def test_comparison(self): | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
| @@ -46,11 +51,11 @@ class TestDecimalField(MongoDBTestCase): | |||||||
|         Person(money=8).save() |         Person(money=8).save() | ||||||
|         Person(money=10).save() |         Person(money=10).save() | ||||||
|  |  | ||||||
|         self.assertEqual(2, Person.objects(money__gt=Decimal("7")).count()) |         assert 2 == Person.objects(money__gt=Decimal("7")).count() | ||||||
|         self.assertEqual(2, Person.objects(money__gt=7).count()) |         assert 2 == Person.objects(money__gt=7).count() | ||||||
|         self.assertEqual(2, Person.objects(money__gt="7").count()) |         assert 2 == Person.objects(money__gt="7").count() | ||||||
|  |  | ||||||
|         self.assertEqual(3, Person.objects(money__gte="7").count()) |         assert 3 == Person.objects(money__gte="7").count() | ||||||
|  |  | ||||||
|     def test_storage(self): |     def test_storage(self): | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
| @@ -58,7 +63,14 @@ class TestDecimalField(MongoDBTestCase): | |||||||
|             string_value = DecimalField(precision=4, force_string=True) |             string_value = DecimalField(precision=4, force_string=True) | ||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|         values_to_store = [10, 10.1, 10.11, "10.111", Decimal("10.1111"), Decimal("10.11111")] |         values_to_store = [ | ||||||
|  |             10, | ||||||
|  |             10.1, | ||||||
|  |             10.11, | ||||||
|  |             "10.111", | ||||||
|  |             Decimal("10.1111"), | ||||||
|  |             Decimal("10.11111"), | ||||||
|  |         ] | ||||||
|         for store_at_creation in [True, False]: |         for store_at_creation in [True, False]: | ||||||
|             for value in values_to_store: |             for value in values_to_store: | ||||||
|                 # to_python is called explicitly if values were sent in the kwargs of __init__ |                 # to_python is called explicitly if values were sent in the kwargs of __init__ | ||||||
| @@ -72,20 +84,27 @@ class TestDecimalField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         # How its stored |         # How its stored | ||||||
|         expected = [ |         expected = [ | ||||||
|             {'float_value': 10.0, 'string_value': '10.0000'}, |             {"float_value": 10.0, "string_value": "10.0000"}, | ||||||
|             {'float_value': 10.1, 'string_value': '10.1000'}, |             {"float_value": 10.1, "string_value": "10.1000"}, | ||||||
|             {'float_value': 10.11, 'string_value': '10.1100'}, |             {"float_value": 10.11, "string_value": "10.1100"}, | ||||||
|             {'float_value': 10.111, 'string_value': '10.1110'}, |             {"float_value": 10.111, "string_value": "10.1110"}, | ||||||
|             {'float_value': 10.1111, 'string_value': '10.1111'}, |             {"float_value": 10.1111, "string_value": "10.1111"}, | ||||||
|             {'float_value': 10.1111, 'string_value': '10.1111'}] |             {"float_value": 10.1111, "string_value": "10.1111"}, | ||||||
|  |         ] | ||||||
|         expected.extend(expected) |         expected.extend(expected) | ||||||
|         actual = list(Person.objects.exclude('id').as_pymongo()) |         actual = list(Person.objects.exclude("id").as_pymongo()) | ||||||
|         self.assertEqual(expected, actual) |         assert expected == actual | ||||||
|  |  | ||||||
|         # How it comes out locally |         # How it comes out locally | ||||||
|         expected = [Decimal('10.0000'), Decimal('10.1000'), Decimal('10.1100'), |         expected = [ | ||||||
|                     Decimal('10.1110'), Decimal('10.1111'), Decimal('10.1111')] |             Decimal("10.0000"), | ||||||
|  |             Decimal("10.1000"), | ||||||
|  |             Decimal("10.1100"), | ||||||
|  |             Decimal("10.1110"), | ||||||
|  |             Decimal("10.1111"), | ||||||
|  |             Decimal("10.1111"), | ||||||
|  |         ] | ||||||
|         expected.extend(expected) |         expected.extend(expected) | ||||||
|         for field_name in ['float_value', 'string_value']: |         for field_name in ["float_value", "string_value"]: | ||||||
|             actual = list(Person.objects().scalar(field_name)) |             actual = list(Person.objects().scalar(field_name)) | ||||||
|             self.assertEqual(expected, actual) |             assert expected == actual | ||||||
|   | |||||||
| @@ -1,100 +1,123 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
|  | from bson import InvalidDocument | ||||||
|  | import pytest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.base import BaseDict | from mongoengine.base import BaseDict | ||||||
|  | from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase, get_as_pymongo | from tests.utils import MongoDBTestCase, get_as_pymongo | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestDictField(MongoDBTestCase): | class TestDictField(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def test_storage(self): |     def test_storage(self): | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
|             info = DictField() |             info = DictField() | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|         info = {'testkey': 'testvalue'} |         info = {"testkey": "testvalue"} | ||||||
|         post = BlogPost(info=info).save() |         post = BlogPost(info=info).save() | ||||||
|         self.assertEqual( |         assert get_as_pymongo(post) == {"_id": post.id, "info": info} | ||||||
|             get_as_pymongo(post), |  | ||||||
|             { |  | ||||||
|                 '_id': post.id, |  | ||||||
|                 'info': info |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_general_things(self): |     def test_validate_invalid_type(self): | ||||||
|         """Ensure that dict types work as expected.""" |         class BlogPost(Document): | ||||||
|  |             info = DictField() | ||||||
|  |  | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |         invalid_infos = ["my post", ["test", "test"], {1: "test"}] | ||||||
|  |         for invalid_info in invalid_infos: | ||||||
|  |             with pytest.raises(ValidationError): | ||||||
|  |                 BlogPost(info=invalid_info).validate() | ||||||
|  |  | ||||||
|  |     def test_keys_with_dots_or_dollars(self): | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
|             info = DictField() |             info = DictField() | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|         post = BlogPost() |         post = BlogPost() | ||||||
|         post.info = 'my post' |  | ||||||
|         self.assertRaises(ValidationError, post.validate) |  | ||||||
|  |  | ||||||
|         post.info = ['test', 'test'] |         post.info = {"$title": "test"} | ||||||
|         self.assertRaises(ValidationError, post.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             post.validate() | ||||||
|  |  | ||||||
|         post.info = {'$title': 'test'} |         post.info = {"nested": {"$title": "test"}} | ||||||
|         self.assertRaises(ValidationError, post.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             post.validate() | ||||||
|  |  | ||||||
|         post.info = {'nested': {'$title': 'test'}} |         post.info = {"$title.test": "test"} | ||||||
|         self.assertRaises(ValidationError, post.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             post.validate() | ||||||
|  |  | ||||||
|         post.info = {'the.title': 'test'} |         post.info = {"nested": {"the.title": "test"}} | ||||||
|         self.assertRaises(ValidationError, post.validate) |         if get_mongodb_version() < MONGODB_36: | ||||||
|  |             # MongoDB < 3.6 rejects dots | ||||||
|  |             # To avoid checking the mongodb version from the DictField class | ||||||
|  |             # we rely on MongoDB to reject the data during the save | ||||||
|  |             post.validate() | ||||||
|  |             with pytest.raises(InvalidDocument): | ||||||
|  |                 post.save() | ||||||
|  |         else: | ||||||
|  |             post.validate() | ||||||
|  |  | ||||||
|         post.info = {'nested': {'the.title': 'test'}} |         post.info = {"dollar_and_dot": {"te$st.test": "test"}} | ||||||
|         self.assertRaises(ValidationError, post.validate) |         if get_mongodb_version() < MONGODB_36: | ||||||
|  |             post.validate() | ||||||
|  |             with pytest.raises(InvalidDocument): | ||||||
|  |                 post.save() | ||||||
|  |         else: | ||||||
|  |             post.validate() | ||||||
|  |  | ||||||
|         post.info = {1: 'test'} |     def test_general_things(self): | ||||||
|         self.assertRaises(ValidationError, post.validate) |         """Ensure that dict types work as expected.""" | ||||||
|  |  | ||||||
|         post.info = {'title': 'test'} |         class BlogPost(Document): | ||||||
|  |             info = DictField() | ||||||
|  |  | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |         post = BlogPost(info={"title": "test"}) | ||||||
|         post.save() |         post.save() | ||||||
|  |  | ||||||
|         post = BlogPost() |         post = BlogPost() | ||||||
|         post.info = {'title': 'dollar_sign', 'details': {'te$t': 'test'}} |         post.info = {"title": "dollar_sign", "details": {"te$t": "test"}} | ||||||
|         post.save() |         post.save() | ||||||
|  |  | ||||||
|         post = BlogPost() |         post = BlogPost() | ||||||
|         post.info = {'details': {'test': 'test'}} |         post.info = {"details": {"test": "test"}} | ||||||
|         post.save() |         post.save() | ||||||
|  |  | ||||||
|         post = BlogPost() |         post = BlogPost() | ||||||
|         post.info = {'details': {'test': 3}} |         post.info = {"details": {"test": 3}} | ||||||
|         post.save() |         post.save() | ||||||
|  |  | ||||||
|         self.assertEqual(BlogPost.objects.count(), 4) |         assert BlogPost.objects.count() == 4 | ||||||
|         self.assertEqual( |         assert BlogPost.objects.filter(info__title__exact="test").count() == 1 | ||||||
|             BlogPost.objects.filter(info__title__exact='test').count(), 1) |         assert BlogPost.objects.filter(info__details__test__exact="test").count() == 1 | ||||||
|         self.assertEqual( |  | ||||||
|             BlogPost.objects.filter(info__details__test__exact='test').count(), 1) |  | ||||||
|  |  | ||||||
|         post = BlogPost.objects.filter(info__title__exact='dollar_sign').first() |         post = BlogPost.objects.filter(info__title__exact="dollar_sign").first() | ||||||
|         self.assertIn('te$t', post['info']['details']) |         assert "te$t" in post["info"]["details"] | ||||||
|  |  | ||||||
|         # Confirm handles non strings or non existing keys |         # Confirm handles non strings or non existing keys | ||||||
|         self.assertEqual( |         assert BlogPost.objects.filter(info__details__test__exact=5).count() == 0 | ||||||
|             BlogPost.objects.filter(info__details__test__exact=5).count(), 0) |         assert BlogPost.objects.filter(info__made_up__test__exact="test").count() == 0 | ||||||
|         self.assertEqual( |  | ||||||
|             BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0) |  | ||||||
|  |  | ||||||
|         post = BlogPost.objects.create(info={'title': 'original'}) |         post = BlogPost.objects.create(info={"title": "original"}) | ||||||
|         post.info.update({'title': 'updated'}) |         post.info.update({"title": "updated"}) | ||||||
|         post.save() |         post.save() | ||||||
|         post.reload() |         post.reload() | ||||||
|         self.assertEqual('updated', post.info['title']) |         assert "updated" == post.info["title"] | ||||||
|  |  | ||||||
|         post.info.setdefault('authors', []) |         post.info.setdefault("authors", []) | ||||||
|         post.save() |         post.save() | ||||||
|         post.reload() |         post.reload() | ||||||
|         self.assertEqual([], post.info['authors']) |         assert [] == post.info["authors"] | ||||||
|  |  | ||||||
|     def test_dictfield_dump_document(self): |     def test_dictfield_dump_document(self): | ||||||
|         """Ensure a DictField can handle another document's dump.""" |         """Ensure a DictField can handle another document's dump.""" | ||||||
|  |  | ||||||
|         class Doc(Document): |         class Doc(Document): | ||||||
|             field = DictField() |             field = DictField() | ||||||
|  |  | ||||||
| @@ -106,51 +129,60 @@ class TestDictField(MongoDBTestCase): | |||||||
|             id = IntField(primary_key=True, default=1) |             id = IntField(primary_key=True, default=1) | ||||||
|             recursive = DictField() |             recursive = DictField() | ||||||
|  |  | ||||||
|             meta = {'allow_inheritance': True} |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|         class ToEmbedChild(ToEmbedParent): |         class ToEmbedChild(ToEmbedParent): | ||||||
|             pass |             pass | ||||||
|  |  | ||||||
|         to_embed_recursive = ToEmbed(id=1).save() |         to_embed_recursive = ToEmbed(id=1).save() | ||||||
|         to_embed = ToEmbed( |         to_embed = ToEmbed( | ||||||
|             id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save() |             id=2, recursive=to_embed_recursive.to_mongo().to_dict() | ||||||
|  |         ).save() | ||||||
|         doc = Doc(field=to_embed.to_mongo().to_dict()) |         doc = Doc(field=to_embed.to_mongo().to_dict()) | ||||||
|         doc.save() |         doc.save() | ||||||
|         self.assertIsInstance(doc.field, dict) |         assert isinstance(doc.field, dict) | ||||||
|         self.assertEqual(doc.field, {'_id': 2, 'recursive': {'_id': 1, 'recursive': {}}}) |         assert doc.field == {"_id": 2, "recursive": {"_id": 1, "recursive": {}}} | ||||||
|         # Same thing with a Document with a _cls field |         # Same thing with a Document with a _cls field | ||||||
|         to_embed_recursive = ToEmbedChild(id=1).save() |         to_embed_recursive = ToEmbedChild(id=1).save() | ||||||
|         to_embed_child = ToEmbedChild( |         to_embed_child = ToEmbedChild( | ||||||
|             id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save() |             id=2, recursive=to_embed_recursive.to_mongo().to_dict() | ||||||
|  |         ).save() | ||||||
|         doc = Doc(field=to_embed_child.to_mongo().to_dict()) |         doc = Doc(field=to_embed_child.to_mongo().to_dict()) | ||||||
|         doc.save() |         doc.save() | ||||||
|         self.assertIsInstance(doc.field, dict) |         assert isinstance(doc.field, dict) | ||||||
|         expected = { |         expected = { | ||||||
|             '_id': 2, '_cls': 'ToEmbedParent.ToEmbedChild', |             "_id": 2, | ||||||
|             'recursive': {'_id': 1, '_cls': 'ToEmbedParent.ToEmbedChild', 'recursive': {}} |             "_cls": "ToEmbedParent.ToEmbedChild", | ||||||
|  |             "recursive": { | ||||||
|  |                 "_id": 1, | ||||||
|  |                 "_cls": "ToEmbedParent.ToEmbedChild", | ||||||
|  |                 "recursive": {}, | ||||||
|  |             }, | ||||||
|         } |         } | ||||||
|         self.assertEqual(doc.field, expected) |         assert doc.field == expected | ||||||
|  |  | ||||||
|     def test_dictfield_strict(self): |     def test_dictfield_strict(self): | ||||||
|         """Ensure that dict field handles validation if provided a strict field type.""" |         """Ensure that dict field handles validation if provided a strict field type.""" | ||||||
|  |  | ||||||
|         class Simple(Document): |         class Simple(Document): | ||||||
|             mapping = DictField(field=IntField()) |             mapping = DictField(field=IntField()) | ||||||
|  |  | ||||||
|         Simple.drop_collection() |         Simple.drop_collection() | ||||||
|  |  | ||||||
|         e = Simple() |         e = Simple() | ||||||
|         e.mapping['someint'] = 1 |         e.mapping["someint"] = 1 | ||||||
|         e.save() |         e.save() | ||||||
|  |  | ||||||
|         # try creating an invalid mapping |         # try creating an invalid mapping | ||||||
|         with self.assertRaises(ValidationError): |         with pytest.raises(ValidationError): | ||||||
|             e.mapping['somestring'] = "abc" |             e.mapping["somestring"] = "abc" | ||||||
|             e.save() |             e.save() | ||||||
|  |  | ||||||
|     def test_dictfield_complex(self): |     def test_dictfield_complex(self): | ||||||
|         """Ensure that the dict field can handle the complex types.""" |         """Ensure that the dict field can handle the complex types.""" | ||||||
|  |  | ||||||
|         class SettingBase(EmbeddedDocument): |         class SettingBase(EmbeddedDocument): | ||||||
|             meta = {'allow_inheritance': True} |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|         class StringSetting(SettingBase): |         class StringSetting(SettingBase): | ||||||
|             value = StringField() |             value = StringField() | ||||||
| @@ -164,73 +196,76 @@ class TestDictField(MongoDBTestCase): | |||||||
|         Simple.drop_collection() |         Simple.drop_collection() | ||||||
|  |  | ||||||
|         e = Simple() |         e = Simple() | ||||||
|         e.mapping['somestring'] = StringSetting(value='foo') |         e.mapping["somestring"] = StringSetting(value="foo") | ||||||
|         e.mapping['someint'] = IntegerSetting(value=42) |         e.mapping["someint"] = IntegerSetting(value=42) | ||||||
|         e.mapping['nested_dict'] = {'number': 1, 'string': 'Hi!', |         e.mapping["nested_dict"] = { | ||||||
|                                     'float': 1.001, |             "number": 1, | ||||||
|                                     'complex': IntegerSetting(value=42), |             "string": "Hi!", | ||||||
|                                     'list': [IntegerSetting(value=42), |             "float": 1.001, | ||||||
|                                              StringSetting(value='foo')]} |             "complex": IntegerSetting(value=42), | ||||||
|  |             "list": [IntegerSetting(value=42), StringSetting(value="foo")], | ||||||
|  |         } | ||||||
|         e.save() |         e.save() | ||||||
|  |  | ||||||
|         e2 = Simple.objects.get(id=e.id) |         e2 = Simple.objects.get(id=e.id) | ||||||
|         self.assertIsInstance(e2.mapping['somestring'], StringSetting) |         assert isinstance(e2.mapping["somestring"], StringSetting) | ||||||
|         self.assertIsInstance(e2.mapping['someint'], IntegerSetting) |         assert isinstance(e2.mapping["someint"], IntegerSetting) | ||||||
|  |  | ||||||
|         # Test querying |         # Test querying | ||||||
|         self.assertEqual( |         assert Simple.objects.filter(mapping__someint__value=42).count() == 1 | ||||||
|             Simple.objects.filter(mapping__someint__value=42).count(), 1) |         assert Simple.objects.filter(mapping__nested_dict__number=1).count() == 1 | ||||||
|         self.assertEqual( |         assert ( | ||||||
|             Simple.objects.filter(mapping__nested_dict__number=1).count(), 1) |             Simple.objects.filter(mapping__nested_dict__complex__value=42).count() == 1 | ||||||
|         self.assertEqual( |         ) | ||||||
|             Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1) |         assert ( | ||||||
|         self.assertEqual( |             Simple.objects.filter(mapping__nested_dict__list__0__value=42).count() == 1 | ||||||
|             Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1) |         ) | ||||||
|         self.assertEqual( |         assert ( | ||||||
|             Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1) |             Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count() | ||||||
|  |             == 1 | ||||||
|  |         ) | ||||||
|  |  | ||||||
|         # Confirm can update |         # Confirm can update | ||||||
|  |         Simple.objects().update(set__mapping={"someint": IntegerSetting(value=10)}) | ||||||
|         Simple.objects().update( |         Simple.objects().update( | ||||||
|             set__mapping={"someint": IntegerSetting(value=10)}) |             set__mapping__nested_dict__list__1=StringSetting(value="Boo") | ||||||
|         Simple.objects().update( |         ) | ||||||
|             set__mapping__nested_dict__list__1=StringSetting(value='Boo')) |         assert ( | ||||||
|         self.assertEqual( |             Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count() | ||||||
|             Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0) |             == 0 | ||||||
|         self.assertEqual( |         ) | ||||||
|             Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1) |         assert ( | ||||||
|  |             Simple.objects.filter(mapping__nested_dict__list__1__value="Boo").count() | ||||||
|  |             == 1 | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     def test_push_dict(self): |     def test_push_dict(self): | ||||||
|         class MyModel(Document): |         class MyModel(Document): | ||||||
|             events = ListField(DictField()) |             events = ListField(DictField()) | ||||||
|  |  | ||||||
|         doc = MyModel(events=[{'a': 1}]).save() |         doc = MyModel(events=[{"a": 1}]).save() | ||||||
|         raw_doc = get_as_pymongo(doc) |         raw_doc = get_as_pymongo(doc) | ||||||
|         expected_raw_doc = { |         expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}]} | ||||||
|             '_id': doc.id, |         assert raw_doc == expected_raw_doc | ||||||
|             'events': [{'a': 1}] |  | ||||||
|         } |  | ||||||
|         self.assertEqual(raw_doc, expected_raw_doc) |  | ||||||
|  |  | ||||||
|         MyModel.objects(id=doc.id).update(push__events={}) |         MyModel.objects(id=doc.id).update(push__events={}) | ||||||
|         raw_doc = get_as_pymongo(doc) |         raw_doc = get_as_pymongo(doc) | ||||||
|         expected_raw_doc = { |         expected_raw_doc = {"_id": doc.id, "events": [{"a": 1}, {}]} | ||||||
|             '_id': doc.id, |         assert raw_doc == expected_raw_doc | ||||||
|             'events': [{'a': 1}, {}] |  | ||||||
|         } |  | ||||||
|         self.assertEqual(raw_doc, expected_raw_doc) |  | ||||||
|  |  | ||||||
|     def test_ensure_unique_default_instances(self): |     def test_ensure_unique_default_instances(self): | ||||||
|         """Ensure that every field has it's own unique default instance.""" |         """Ensure that every field has it's own unique default instance.""" | ||||||
|  |  | ||||||
|         class D(Document): |         class D(Document): | ||||||
|             data = DictField() |             data = DictField() | ||||||
|             data2 = DictField(default=lambda: {}) |             data2 = DictField(default=lambda: {}) | ||||||
|  |  | ||||||
|         d1 = D() |         d1 = D() | ||||||
|         d1.data['foo'] = 'bar' |         d1.data["foo"] = "bar" | ||||||
|         d1.data2['foo'] = 'bar' |         d1.data2["foo"] = "bar" | ||||||
|         d2 = D() |         d2 = D() | ||||||
|         self.assertEqual(d2.data, {}) |         assert d2.data == {} | ||||||
|         self.assertEqual(d2.data2, {}) |         assert d2.data2 == {} | ||||||
|  |  | ||||||
|     def test_dict_field_invalid_dict_value(self): |     def test_dict_field_invalid_dict_value(self): | ||||||
|         class DictFieldTest(Document): |         class DictFieldTest(Document): | ||||||
| @@ -240,11 +275,13 @@ class TestDictField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         test = DictFieldTest(dictionary=None) |         test = DictFieldTest(dictionary=None) | ||||||
|         test.dictionary  # Just access to test getter |         test.dictionary  # Just access to test getter | ||||||
|         self.assertRaises(ValidationError, test.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             test.validate() | ||||||
|  |  | ||||||
|         test = DictFieldTest(dictionary=False) |         test = DictFieldTest(dictionary=False) | ||||||
|         test.dictionary  # Just access to test getter |         test.dictionary  # Just access to test getter | ||||||
|         self.assertRaises(ValidationError, test.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             test.validate() | ||||||
|  |  | ||||||
|     def test_dict_field_raises_validation_error_if_wrongly_assign_embedded_doc(self): |     def test_dict_field_raises_validation_error_if_wrongly_assign_embedded_doc(self): | ||||||
|         class DictFieldTest(Document): |         class DictFieldTest(Document): | ||||||
| @@ -255,31 +292,34 @@ class TestDictField(MongoDBTestCase): | |||||||
|         class Embedded(EmbeddedDocument): |         class Embedded(EmbeddedDocument): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         embed = Embedded(name='garbage') |         embed = Embedded(name="garbage") | ||||||
|         doc = DictFieldTest(dictionary=embed) |         doc = DictFieldTest(dictionary=embed) | ||||||
|         with self.assertRaises(ValidationError) as ctx_err: |         with pytest.raises(ValidationError) as exc_info: | ||||||
|             doc.validate() |             doc.validate() | ||||||
|         self.assertIn("'dictionary'", str(ctx_err.exception)) |  | ||||||
|         self.assertIn('Only dictionaries may be used in a DictField', str(ctx_err.exception)) |         error_msg = str(exc_info.value) | ||||||
|  |         assert "'dictionary'" in error_msg | ||||||
|  |         assert "Only dictionaries may be used in a DictField" in error_msg | ||||||
|  |  | ||||||
|     def test_atomic_update_dict_field(self): |     def test_atomic_update_dict_field(self): | ||||||
|         """Ensure that the entire DictField can be atomically updated.""" |         """Ensure that the entire DictField can be atomically updated.""" | ||||||
|  |  | ||||||
|         class Simple(Document): |         class Simple(Document): | ||||||
|             mapping = DictField(field=ListField(IntField(required=True))) |             mapping = DictField(field=ListField(IntField(required=True))) | ||||||
|  |  | ||||||
|         Simple.drop_collection() |         Simple.drop_collection() | ||||||
|  |  | ||||||
|         e = Simple() |         e = Simple() | ||||||
|         e.mapping['someints'] = [1, 2] |         e.mapping["someints"] = [1, 2] | ||||||
|         e.save() |         e.save() | ||||||
|         e.update(set__mapping={"ints": [3, 4]}) |         e.update(set__mapping={"ints": [3, 4]}) | ||||||
|         e.reload() |         e.reload() | ||||||
|         self.assertEqual(BaseDict, type(e.mapping)) |         assert isinstance(e.mapping, BaseDict) | ||||||
|         self.assertEqual({"ints": [3, 4]}, e.mapping) |         assert {"ints": [3, 4]} == e.mapping | ||||||
|  |  | ||||||
|         # try creating an invalid mapping |         # try creating an invalid mapping | ||||||
|         with self.assertRaises(ValueError): |         with pytest.raises(ValueError): | ||||||
|             e.update(set__mapping={"somestrings": ["foo", "bar", ]}) |             e.update(set__mapping={"somestrings": ["foo", "bar"]}) | ||||||
|  |  | ||||||
|     def test_dictfield_with_referencefield_complex_nesting_cases(self): |     def test_dictfield_with_referencefield_complex_nesting_cases(self): | ||||||
|         """Ensure complex nesting inside DictField handles dereferencing of ReferenceField(dbref=True | False)""" |         """Ensure complex nesting inside DictField handles dereferencing of ReferenceField(dbref=True | False)""" | ||||||
| @@ -296,29 +336,33 @@ class TestDictField(MongoDBTestCase): | |||||||
|             mapping5 = DictField(DictField(field=ReferenceField(Doc, dbref=False))) |             mapping5 = DictField(DictField(field=ReferenceField(Doc, dbref=False))) | ||||||
|             mapping6 = DictField(ListField(DictField(ReferenceField(Doc, dbref=True)))) |             mapping6 = DictField(ListField(DictField(ReferenceField(Doc, dbref=True)))) | ||||||
|             mapping7 = DictField(ListField(DictField(ReferenceField(Doc, dbref=False)))) |             mapping7 = DictField(ListField(DictField(ReferenceField(Doc, dbref=False)))) | ||||||
|             mapping8 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=True))))) |             mapping8 = DictField( | ||||||
|             mapping9 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=False))))) |                 ListField(DictField(ListField(ReferenceField(Doc, dbref=True)))) | ||||||
|  |             ) | ||||||
|  |             mapping9 = DictField( | ||||||
|  |                 ListField(DictField(ListField(ReferenceField(Doc, dbref=False)))) | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         Doc.drop_collection() |         Doc.drop_collection() | ||||||
|         Simple.drop_collection() |         Simple.drop_collection() | ||||||
|  |  | ||||||
|         d = Doc(s='aa').save() |         d = Doc(s="aa").save() | ||||||
|         e = Simple() |         e = Simple() | ||||||
|         e.mapping0['someint'] = e.mapping1['someint'] = d |         e.mapping0["someint"] = e.mapping1["someint"] = d | ||||||
|         e.mapping2['someint'] = e.mapping3['someint'] = [d] |         e.mapping2["someint"] = e.mapping3["someint"] = [d] | ||||||
|         e.mapping4['someint'] = e.mapping5['someint'] = {'d': d} |         e.mapping4["someint"] = e.mapping5["someint"] = {"d": d} | ||||||
|         e.mapping6['someint'] = e.mapping7['someint'] = [{'d': d}] |         e.mapping6["someint"] = e.mapping7["someint"] = [{"d": d}] | ||||||
|         e.mapping8['someint'] = e.mapping9['someint'] = [{'d': [d]}] |         e.mapping8["someint"] = e.mapping9["someint"] = [{"d": [d]}] | ||||||
|         e.save() |         e.save() | ||||||
|  |  | ||||||
|         s = Simple.objects.first() |         s = Simple.objects.first() | ||||||
|         self.assertIsInstance(s.mapping0['someint'], Doc) |         assert isinstance(s.mapping0["someint"], Doc) | ||||||
|         self.assertIsInstance(s.mapping1['someint'], Doc) |         assert isinstance(s.mapping1["someint"], Doc) | ||||||
|         self.assertIsInstance(s.mapping2['someint'][0], Doc) |         assert isinstance(s.mapping2["someint"][0], Doc) | ||||||
|         self.assertIsInstance(s.mapping3['someint'][0], Doc) |         assert isinstance(s.mapping3["someint"][0], Doc) | ||||||
|         self.assertIsInstance(s.mapping4['someint']['d'], Doc) |         assert isinstance(s.mapping4["someint"]["d"], Doc) | ||||||
|         self.assertIsInstance(s.mapping5['someint']['d'], Doc) |         assert isinstance(s.mapping5["someint"]["d"], Doc) | ||||||
|         self.assertIsInstance(s.mapping6['someint'][0]['d'], Doc) |         assert isinstance(s.mapping6["someint"][0]["d"], Doc) | ||||||
|         self.assertIsInstance(s.mapping7['someint'][0]['d'], Doc) |         assert isinstance(s.mapping7["someint"][0]["d"], Doc) | ||||||
|         self.assertIsInstance(s.mapping8['someint'][0]['d'][0], Doc) |         assert isinstance(s.mapping8["someint"][0]["d"][0], Doc) | ||||||
|         self.assertIsInstance(s.mapping9['someint'][0]['d'][0], Doc) |         assert isinstance(s.mapping9["someint"][0]["d"][0], Doc) | ||||||
|   | |||||||
| @@ -1,9 +1,9 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| import sys | import sys | ||||||
| from unittest import SkipTest |  | ||||||
|  | import pytest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -12,52 +12,52 @@ class TestEmailField(MongoDBTestCase): | |||||||
|         class User(Document): |         class User(Document): | ||||||
|             email = EmailField() |             email = EmailField() | ||||||
|  |  | ||||||
|         user = User(email='ross@example.com') |         user = User(email="ross@example.com") | ||||||
|         user.validate() |         user.validate() | ||||||
|  |  | ||||||
|         user = User(email='ross@example.co.uk') |         user = User(email="ross@example.co.uk") | ||||||
|         user.validate() |         user.validate() | ||||||
|  |  | ||||||
|         user = User(email=('Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5S' |         user = User( | ||||||
|                            'aJIazqqWkm7.net')) |             email=("Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5SaJIazqqWkm7.net") | ||||||
|  |         ) | ||||||
|         user.validate() |         user.validate() | ||||||
|  |  | ||||||
|         user = User(email='new-tld@example.technology') |         user = User(email="new-tld@example.technology") | ||||||
|         user.validate() |         user.validate() | ||||||
|  |  | ||||||
|         user = User(email='ross@example.com.') |         user = User(email="ross@example.com.") | ||||||
|         self.assertRaises(ValidationError, user.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             user.validate() | ||||||
|  |  | ||||||
|         # unicode domain |         # unicode domain | ||||||
|         user = User(email=u'user@пример.рф') |         user = User(email=u"user@пример.рф") | ||||||
|         user.validate() |         user.validate() | ||||||
|  |  | ||||||
|         # invalid unicode domain |         # invalid unicode domain | ||||||
|         user = User(email=u'user@пример') |         user = User(email=u"user@пример") | ||||||
|         self.assertRaises(ValidationError, user.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             user.validate() | ||||||
|  |  | ||||||
|         # invalid data type |         # invalid data type | ||||||
|         user = User(email=123) |         user = User(email=123) | ||||||
|         self.assertRaises(ValidationError, user.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             user.validate() | ||||||
|  |  | ||||||
|     def test_email_field_unicode_user(self): |     def test_email_field_unicode_user(self): | ||||||
|         # Don't run this test on pypy3, which doesn't support unicode regex: |  | ||||||
|         # https://bitbucket.org/pypy/pypy/issues/1821/regular-expression-doesnt-find-unicode |  | ||||||
|         if sys.version_info[:2] == (3, 2): |  | ||||||
|             raise SkipTest('unicode email addresses are not supported on PyPy 3') |  | ||||||
|  |  | ||||||
|         class User(Document): |         class User(Document): | ||||||
|             email = EmailField() |             email = EmailField() | ||||||
|  |  | ||||||
|         # unicode user shouldn't validate by default... |         # unicode user shouldn't validate by default... | ||||||
|         user = User(email=u'Dörte@Sörensen.example.com') |         user = User(email=u"Dörte@Sörensen.example.com") | ||||||
|         self.assertRaises(ValidationError, user.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             user.validate() | ||||||
|  |  | ||||||
|         # ...but it should be fine with allow_utf8_user set to True |         # ...but it should be fine with allow_utf8_user set to True | ||||||
|         class User(Document): |         class User(Document): | ||||||
|             email = EmailField(allow_utf8_user=True) |             email = EmailField(allow_utf8_user=True) | ||||||
|  |  | ||||||
|         user = User(email=u'Dörte@Sörensen.example.com') |         user = User(email=u"Dörte@Sörensen.example.com") | ||||||
|         user.validate() |         user.validate() | ||||||
|  |  | ||||||
|     def test_email_field_domain_whitelist(self): |     def test_email_field_domain_whitelist(self): | ||||||
| @@ -65,43 +65,48 @@ class TestEmailField(MongoDBTestCase): | |||||||
|             email = EmailField() |             email = EmailField() | ||||||
|  |  | ||||||
|         # localhost domain shouldn't validate by default... |         # localhost domain shouldn't validate by default... | ||||||
|         user = User(email='me@localhost') |         user = User(email="me@localhost") | ||||||
|         self.assertRaises(ValidationError, user.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             user.validate() | ||||||
|  |  | ||||||
|         # ...but it should be fine if it's whitelisted |         # ...but it should be fine if it's whitelisted | ||||||
|         class User(Document): |         class User(Document): | ||||||
|             email = EmailField(domain_whitelist=['localhost']) |             email = EmailField(domain_whitelist=["localhost"]) | ||||||
|  |  | ||||||
|         user = User(email='me@localhost') |         user = User(email="me@localhost") | ||||||
|         user.validate() |         user.validate() | ||||||
|  |  | ||||||
|     def test_email_domain_validation_fails_if_invalid_idn(self): |     def test_email_domain_validation_fails_if_invalid_idn(self): | ||||||
|         class User(Document): |         class User(Document): | ||||||
|             email = EmailField() |             email = EmailField() | ||||||
|  |  | ||||||
|         invalid_idn = '.google.com' |         invalid_idn = ".google.com" | ||||||
|         user = User(email='me@%s' % invalid_idn) |         user = User(email="me@%s" % invalid_idn) | ||||||
|         with self.assertRaises(ValidationError) as ctx_err: |  | ||||||
|  |         with pytest.raises(ValidationError) as exc_info: | ||||||
|             user.validate() |             user.validate() | ||||||
|         self.assertIn("domain failed IDN encoding", str(ctx_err.exception)) |         assert "domain failed IDN encoding" in str(exc_info.value) | ||||||
|  |  | ||||||
|     def test_email_field_ip_domain(self): |     def test_email_field_ip_domain(self): | ||||||
|         class User(Document): |         class User(Document): | ||||||
|             email = EmailField() |             email = EmailField() | ||||||
|  |  | ||||||
|         valid_ipv4 = 'email@[127.0.0.1]' |         valid_ipv4 = "email@[127.0.0.1]" | ||||||
|         valid_ipv6 = 'email@[2001:dB8::1]' |         valid_ipv6 = "email@[2001:dB8::1]" | ||||||
|         invalid_ip = 'email@[324.0.0.1]' |         invalid_ip = "email@[324.0.0.1]" | ||||||
|  |  | ||||||
|         # IP address as a domain shouldn't validate by default... |         # IP address as a domain shouldn't validate by default... | ||||||
|         user = User(email=valid_ipv4) |         user = User(email=valid_ipv4) | ||||||
|         self.assertRaises(ValidationError, user.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             user.validate() | ||||||
|  |  | ||||||
|         user = User(email=valid_ipv6) |         user = User(email=valid_ipv6) | ||||||
|         self.assertRaises(ValidationError, user.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             user.validate() | ||||||
|  |  | ||||||
|         user = User(email=invalid_ip) |         user = User(email=invalid_ip) | ||||||
|         self.assertRaises(ValidationError, user.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             user.validate() | ||||||
|  |  | ||||||
|         # ...but it should be fine with allow_ip_domain set to True |         # ...but it should be fine with allow_ip_domain set to True | ||||||
|         class User(Document): |         class User(Document): | ||||||
| @@ -115,16 +120,18 @@ class TestEmailField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         # invalid IP should still fail validation |         # invalid IP should still fail validation | ||||||
|         user = User(email=invalid_ip) |         user = User(email=invalid_ip) | ||||||
|         self.assertRaises(ValidationError, user.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             user.validate() | ||||||
|  |  | ||||||
|     def test_email_field_honors_regex(self): |     def test_email_field_honors_regex(self): | ||||||
|         class User(Document): |         class User(Document): | ||||||
|             email = EmailField(regex=r'\w+@example.com') |             email = EmailField(regex=r"\w+@example.com") | ||||||
|  |  | ||||||
|         # Fails regex validation |         # Fails regex validation | ||||||
|         user = User(email='me@foo.com') |         user = User(email="me@foo.com") | ||||||
|         self.assertRaises(ValidationError, user.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             user.validate() | ||||||
|  |  | ||||||
|         # Passes regex validation |         # Passes regex validation | ||||||
|         user = User(email='me@example.com') |         user = User(email="me@example.com") | ||||||
|         self.assertIsNone(user.validate()) |         assert user.validate() is None | ||||||
|   | |||||||
| @@ -1,7 +1,18 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| from mongoengine import Document, StringField, ValidationError, EmbeddedDocument, EmbeddedDocumentField, \ | import pytest | ||||||
|     InvalidQueryError, LookUpError, IntField, GenericEmbeddedDocumentField, ListField, EmbeddedDocumentListField, \ |  | ||||||
|     ReferenceField | from mongoengine import ( | ||||||
|  |     Document, | ||||||
|  |     EmbeddedDocument, | ||||||
|  |     EmbeddedDocumentField, | ||||||
|  |     GenericEmbeddedDocumentField, | ||||||
|  |     IntField, | ||||||
|  |     InvalidQueryError, | ||||||
|  |     ListField, | ||||||
|  |     LookUpError, | ||||||
|  |     StringField, | ||||||
|  |     ValidationError, | ||||||
|  | ) | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
| @@ -12,37 +23,41 @@ class TestEmbeddedDocumentField(MongoDBTestCase): | |||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         field = EmbeddedDocumentField(MyDoc) |         field = EmbeddedDocumentField(MyDoc) | ||||||
|         self.assertEqual(field.document_type_obj, MyDoc) |         assert field.document_type_obj == MyDoc | ||||||
|  |  | ||||||
|         field2 = EmbeddedDocumentField('MyDoc') |         field2 = EmbeddedDocumentField("MyDoc") | ||||||
|         self.assertEqual(field2.document_type_obj, 'MyDoc') |         assert field2.document_type_obj == "MyDoc" | ||||||
|  |  | ||||||
|     def test___init___throw_error_if_document_type_is_not_EmbeddedDocument(self): |     def test___init___throw_error_if_document_type_is_not_EmbeddedDocument(self): | ||||||
|         with self.assertRaises(ValidationError): |         with pytest.raises(ValidationError): | ||||||
|             EmbeddedDocumentField(dict) |             EmbeddedDocumentField(dict) | ||||||
|  |  | ||||||
|     def test_document_type_throw_error_if_not_EmbeddedDocument_subclass(self): |     def test_document_type_throw_error_if_not_EmbeddedDocument_subclass(self): | ||||||
|  |  | ||||||
|         class MyDoc(Document): |         class MyDoc(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         emb = EmbeddedDocumentField('MyDoc') |         emb = EmbeddedDocumentField("MyDoc") | ||||||
|         with self.assertRaises(ValidationError) as ctx: |         with pytest.raises(ValidationError) as exc_info: | ||||||
|             emb.document_type |             emb.document_type | ||||||
|         self.assertIn('Invalid embedded document class provided to an EmbeddedDocumentField', str(ctx.exception)) |         assert ( | ||||||
|  |             "Invalid embedded document class provided to an EmbeddedDocumentField" | ||||||
|  |             in str(exc_info.value) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self): |     def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self): | ||||||
|         # Relates to #1661 |         # Relates to #1661 | ||||||
|         class MyDoc(Document): |         class MyDoc(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         with self.assertRaises(ValidationError): |         with pytest.raises(ValidationError): | ||||||
|  |  | ||||||
|             class MyFailingDoc(Document): |             class MyFailingDoc(Document): | ||||||
|                 emb = EmbeddedDocumentField(MyDoc) |                 emb = EmbeddedDocumentField(MyDoc) | ||||||
|  |  | ||||||
|         with self.assertRaises(ValidationError): |         with pytest.raises(ValidationError): | ||||||
|  |  | ||||||
|             class MyFailingdoc2(Document): |             class MyFailingdoc2(Document): | ||||||
|                 emb = EmbeddedDocumentField('MyDoc') |                 emb = EmbeddedDocumentField("MyDoc") | ||||||
|  |  | ||||||
|     def test_query_embedded_document_attribute(self): |     def test_query_embedded_document_attribute(self): | ||||||
|         class AdminSettings(EmbeddedDocument): |         class AdminSettings(EmbeddedDocument): | ||||||
| @@ -55,34 +70,31 @@ class TestEmbeddedDocumentField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         p = Person( |         p = Person(settings=AdminSettings(foo1="bar1", foo2="bar2"), name="John").save() | ||||||
|             settings=AdminSettings(foo1='bar1', foo2='bar2'), |  | ||||||
|             name='John', |  | ||||||
|         ).save() |  | ||||||
|  |  | ||||||
|         # Test non exiting attribute |         # Test non exiting attribute | ||||||
|         with self.assertRaises(InvalidQueryError) as ctx_err: |         with pytest.raises(InvalidQueryError) as exc_info: | ||||||
|             Person.objects(settings__notexist='bar').first() |             Person.objects(settings__notexist="bar").first() | ||||||
|         self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') |         assert str(exc_info.value) == u'Cannot resolve field "notexist"' | ||||||
|  |  | ||||||
|         with self.assertRaises(LookUpError): |         with pytest.raises(LookUpError): | ||||||
|             Person.objects.only('settings.notexist') |             Person.objects.only("settings.notexist") | ||||||
|  |  | ||||||
|         # Test existing attribute |         # Test existing attribute | ||||||
|         self.assertEqual(Person.objects(settings__foo1='bar1').first().id, p.id) |         assert Person.objects(settings__foo1="bar1").first().id == p.id | ||||||
|         only_p = Person.objects.only('settings.foo1').first() |         only_p = Person.objects.only("settings.foo1").first() | ||||||
|         self.assertEqual(only_p.settings.foo1, p.settings.foo1) |         assert only_p.settings.foo1 == p.settings.foo1 | ||||||
|         self.assertIsNone(only_p.settings.foo2) |         assert only_p.settings.foo2 is None | ||||||
|         self.assertIsNone(only_p.name) |         assert only_p.name is None | ||||||
|  |  | ||||||
|         exclude_p = Person.objects.exclude('settings.foo1').first() |         exclude_p = Person.objects.exclude("settings.foo1").first() | ||||||
|         self.assertIsNone(exclude_p.settings.foo1) |         assert exclude_p.settings.foo1 is None | ||||||
|         self.assertEqual(exclude_p.settings.foo2, p.settings.foo2) |         assert exclude_p.settings.foo2 == p.settings.foo2 | ||||||
|         self.assertEqual(exclude_p.name, p.name) |         assert exclude_p.name == p.name | ||||||
|  |  | ||||||
|     def test_query_embedded_document_attribute_with_inheritance(self): |     def test_query_embedded_document_attribute_with_inheritance(self): | ||||||
|         class BaseSettings(EmbeddedDocument): |         class BaseSettings(EmbeddedDocument): | ||||||
|             meta = {'allow_inheritance': True} |             meta = {"allow_inheritance": True} | ||||||
|             base_foo = StringField() |             base_foo = StringField() | ||||||
|  |  | ||||||
|         class AdminSettings(BaseSettings): |         class AdminSettings(BaseSettings): | ||||||
| @@ -93,26 +105,26 @@ class TestEmbeddedDocumentField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         p = Person(settings=AdminSettings(base_foo='basefoo', sub_foo='subfoo')) |         p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo")) | ||||||
|         p.save() |         p.save() | ||||||
|  |  | ||||||
|         # Test non exiting attribute |         # Test non exiting attribute | ||||||
|         with self.assertRaises(InvalidQueryError) as ctx_err: |         with pytest.raises(InvalidQueryError) as exc_info: | ||||||
|             self.assertEqual(Person.objects(settings__notexist='bar').first().id, p.id) |             assert Person.objects(settings__notexist="bar").first().id == p.id | ||||||
|         self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') |         assert str(exc_info.value) == u'Cannot resolve field "notexist"' | ||||||
|  |  | ||||||
|         # Test existing attribute |         # Test existing attribute | ||||||
|         self.assertEqual(Person.objects(settings__base_foo='basefoo').first().id, p.id) |         assert Person.objects(settings__base_foo="basefoo").first().id == p.id | ||||||
|         self.assertEqual(Person.objects(settings__sub_foo='subfoo').first().id, p.id) |         assert Person.objects(settings__sub_foo="subfoo").first().id == p.id | ||||||
|  |  | ||||||
|         only_p = Person.objects.only('settings.base_foo', 'settings._cls').first() |         only_p = Person.objects.only("settings.base_foo", "settings._cls").first() | ||||||
|         self.assertEqual(only_p.settings.base_foo, 'basefoo') |         assert only_p.settings.base_foo == "basefoo" | ||||||
|         self.assertIsNone(only_p.settings.sub_foo) |         assert only_p.settings.sub_foo is None | ||||||
|  |  | ||||||
|     def test_query_list_embedded_document_with_inheritance(self): |     def test_query_list_embedded_document_with_inheritance(self): | ||||||
|         class Post(EmbeddedDocument): |         class Post(EmbeddedDocument): | ||||||
|             title = StringField(max_length=120, required=True) |             title = StringField(max_length=120, required=True) | ||||||
|             meta = {'allow_inheritance': True} |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|         class TextPost(Post): |         class TextPost(Post): | ||||||
|             content = StringField() |             content = StringField() | ||||||
| @@ -123,22 +135,21 @@ class TestEmbeddedDocumentField(MongoDBTestCase): | |||||||
|         class Record(Document): |         class Record(Document): | ||||||
|             posts = ListField(EmbeddedDocumentField(Post)) |             posts = ListField(EmbeddedDocumentField(Post)) | ||||||
|  |  | ||||||
|         record_movie = Record(posts=[MoviePost(author='John', title='foo')]).save() |         record_movie = Record(posts=[MoviePost(author="John", title="foo")]).save() | ||||||
|         record_text = Record(posts=[TextPost(content='a', title='foo')]).save() |         record_text = Record(posts=[TextPost(content="a", title="foo")]).save() | ||||||
|  |  | ||||||
|         records = list(Record.objects(posts__author=record_movie.posts[0].author)) |         records = list(Record.objects(posts__author=record_movie.posts[0].author)) | ||||||
|         self.assertEqual(len(records), 1) |         assert len(records) == 1 | ||||||
|         self.assertEqual(records[0].id, record_movie.id) |         assert records[0].id == record_movie.id | ||||||
|  |  | ||||||
|         records = list(Record.objects(posts__content=record_text.posts[0].content)) |         records = list(Record.objects(posts__content=record_text.posts[0].content)) | ||||||
|         self.assertEqual(len(records), 1) |         assert len(records) == 1 | ||||||
|         self.assertEqual(records[0].id, record_text.id) |         assert records[0].id == record_text.id | ||||||
|  |  | ||||||
|         self.assertEqual(Record.objects(posts__title='foo').count(), 2) |         assert Record.objects(posts__title="foo").count() == 2 | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestGenericEmbeddedDocumentField(MongoDBTestCase): | class TestGenericEmbeddedDocumentField(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def test_generic_embedded_document(self): |     def test_generic_embedded_document(self): | ||||||
|         class Car(EmbeddedDocument): |         class Car(EmbeddedDocument): | ||||||
|             name = StringField() |             name = StringField() | ||||||
| @@ -153,21 +164,22 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         person = Person(name='Test User') |         person = Person(name="Test User") | ||||||
|         person.like = Car(name='Fiat') |         person.like = Car(name="Fiat") | ||||||
|         person.save() |         person.save() | ||||||
|  |  | ||||||
|         person = Person.objects.first() |         person = Person.objects.first() | ||||||
|         self.assertIsInstance(person.like, Car) |         assert isinstance(person.like, Car) | ||||||
|  |  | ||||||
|         person.like = Dish(food="arroz", number=15) |         person.like = Dish(food="arroz", number=15) | ||||||
|         person.save() |         person.save() | ||||||
|  |  | ||||||
|         person = Person.objects.first() |         person = Person.objects.first() | ||||||
|         self.assertIsInstance(person.like, Dish) |         assert isinstance(person.like, Dish) | ||||||
|  |  | ||||||
|     def test_generic_embedded_document_choices(self): |     def test_generic_embedded_document_choices(self): | ||||||
|         """Ensure you can limit GenericEmbeddedDocument choices.""" |         """Ensure you can limit GenericEmbeddedDocument choices.""" | ||||||
|  |  | ||||||
|         class Car(EmbeddedDocument): |         class Car(EmbeddedDocument): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
| @@ -181,20 +193,22 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         person = Person(name='Test User') |         person = Person(name="Test User") | ||||||
|         person.like = Car(name='Fiat') |         person.like = Car(name="Fiat") | ||||||
|         self.assertRaises(ValidationError, person.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             person.validate() | ||||||
|  |  | ||||||
|         person.like = Dish(food="arroz", number=15) |         person.like = Dish(food="arroz", number=15) | ||||||
|         person.save() |         person.save() | ||||||
|  |  | ||||||
|         person = Person.objects.first() |         person = Person.objects.first() | ||||||
|         self.assertIsInstance(person.like, Dish) |         assert isinstance(person.like, Dish) | ||||||
|  |  | ||||||
|     def test_generic_list_embedded_document_choices(self): |     def test_generic_list_embedded_document_choices(self): | ||||||
|         """Ensure you can limit GenericEmbeddedDocument choices inside |         """Ensure you can limit GenericEmbeddedDocument choices inside | ||||||
|         a list field. |         a list field. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Car(EmbeddedDocument): |         class Car(EmbeddedDocument): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
| @@ -208,39 +222,38 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         person = Person(name='Test User') |         person = Person(name="Test User") | ||||||
|         person.likes = [Car(name='Fiat')] |         person.likes = [Car(name="Fiat")] | ||||||
|         self.assertRaises(ValidationError, person.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             person.validate() | ||||||
|  |  | ||||||
|         person.likes = [Dish(food="arroz", number=15)] |         person.likes = [Dish(food="arroz", number=15)] | ||||||
|         person.save() |         person.save() | ||||||
|  |  | ||||||
|         person = Person.objects.first() |         person = Person.objects.first() | ||||||
|         self.assertIsInstance(person.likes[0], Dish) |         assert isinstance(person.likes[0], Dish) | ||||||
|  |  | ||||||
|     def test_choices_validation_documents(self): |     def test_choices_validation_documents(self): | ||||||
|         """ |         """ | ||||||
|         Ensure fields with document choices validate given a valid choice. |         Ensure fields with document choices validate given a valid choice. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class UserComments(EmbeddedDocument): |         class UserComments(EmbeddedDocument): | ||||||
|             author = StringField() |             author = StringField() | ||||||
|             message = StringField() |             message = StringField() | ||||||
|  |  | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
|             comments = ListField( |             comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,))) | ||||||
|                 GenericEmbeddedDocumentField(choices=(UserComments,)) |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         # Ensure Validation Passes |         # Ensure Validation Passes | ||||||
|         BlogPost(comments=[ |         BlogPost(comments=[UserComments(author="user2", message="message2")]).save() | ||||||
|             UserComments(author='user2', message='message2'), |  | ||||||
|         ]).save() |  | ||||||
|  |  | ||||||
|     def test_choices_validation_documents_invalid(self): |     def test_choices_validation_documents_invalid(self): | ||||||
|         """ |         """ | ||||||
|         Ensure fields with document choices validate given an invalid choice. |         Ensure fields with document choices validate given an invalid choice. | ||||||
|         This should throw a ValidationError exception. |         This should throw a ValidationError exception. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class UserComments(EmbeddedDocument): |         class UserComments(EmbeddedDocument): | ||||||
|             author = StringField() |             author = StringField() | ||||||
|             message = StringField() |             message = StringField() | ||||||
| @@ -250,31 +263,30 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): | |||||||
|             message = StringField() |             message = StringField() | ||||||
|  |  | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
|             comments = ListField( |             comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,))) | ||||||
|                 GenericEmbeddedDocumentField(choices=(UserComments,)) |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         # Single Entry Failure |         # Single Entry Failure | ||||||
|         post = BlogPost(comments=[ |         post = BlogPost(comments=[ModeratorComments(author="mod1", message="message1")]) | ||||||
|             ModeratorComments(author='mod1', message='message1'), |         with pytest.raises(ValidationError): | ||||||
|         ]) |             post.save() | ||||||
|         self.assertRaises(ValidationError, post.save) |  | ||||||
|  |  | ||||||
|         # Mixed Entry Failure |         # Mixed Entry Failure | ||||||
|         post = BlogPost(comments=[ |         post = BlogPost( | ||||||
|             ModeratorComments(author='mod1', message='message1'), |             comments=[ | ||||||
|             UserComments(author='user2', message='message2'), |                 ModeratorComments(author="mod1", message="message1"), | ||||||
|         ]) |                 UserComments(author="user2", message="message2"), | ||||||
|         self.assertRaises(ValidationError, post.save) |             ] | ||||||
|  |         ) | ||||||
|  |         with pytest.raises(ValidationError): | ||||||
|  |             post.save() | ||||||
|  |  | ||||||
|     def test_choices_validation_documents_inheritance(self): |     def test_choices_validation_documents_inheritance(self): | ||||||
|         """ |         """ | ||||||
|         Ensure fields with document choices validate given subclass of choice. |         Ensure fields with document choices validate given subclass of choice. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Comments(EmbeddedDocument): |         class Comments(EmbeddedDocument): | ||||||
|             meta = { |             meta = {"abstract": True} | ||||||
|                 'abstract': True |  | ||||||
|             } |  | ||||||
|             author = StringField() |             author = StringField() | ||||||
|             message = StringField() |             message = StringField() | ||||||
|  |  | ||||||
| @@ -282,14 +294,10 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): | |||||||
|             pass |             pass | ||||||
|  |  | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
|             comments = ListField( |             comments = ListField(GenericEmbeddedDocumentField(choices=(Comments,))) | ||||||
|                 GenericEmbeddedDocumentField(choices=(Comments,)) |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         # Save Valid EmbeddedDocument Type |         # Save Valid EmbeddedDocument Type | ||||||
|         BlogPost(comments=[ |         BlogPost(comments=[UserComments(author="user2", message="message2")]).save() | ||||||
|             UserComments(author='user2', message='message2'), |  | ||||||
|         ]).save() |  | ||||||
|  |  | ||||||
|     def test_query_generic_embedded_document_attribute(self): |     def test_query_generic_embedded_document_attribute(self): | ||||||
|         class AdminSettings(EmbeddedDocument): |         class AdminSettings(EmbeddedDocument): | ||||||
| @@ -299,28 +307,30 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): | |||||||
|             foo2 = StringField() |             foo2 = StringField() | ||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             settings = GenericEmbeddedDocumentField(choices=(AdminSettings, NonAdminSettings)) |             settings = GenericEmbeddedDocumentField( | ||||||
|  |                 choices=(AdminSettings, NonAdminSettings) | ||||||
|  |             ) | ||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         p1 = Person(settings=AdminSettings(foo1='bar1')).save() |         p1 = Person(settings=AdminSettings(foo1="bar1")).save() | ||||||
|         p2 = Person(settings=NonAdminSettings(foo2='bar2')).save() |         p2 = Person(settings=NonAdminSettings(foo2="bar2")).save() | ||||||
|  |  | ||||||
|         # Test non exiting attribute |         # Test non exiting attribute | ||||||
|         with self.assertRaises(InvalidQueryError) as ctx_err: |         with pytest.raises(InvalidQueryError) as exc_info: | ||||||
|             Person.objects(settings__notexist='bar').first() |             Person.objects(settings__notexist="bar").first() | ||||||
|         self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') |         assert str(exc_info.value) == u'Cannot resolve field "notexist"' | ||||||
|  |  | ||||||
|         with self.assertRaises(LookUpError): |         with pytest.raises(LookUpError): | ||||||
|             Person.objects.only('settings.notexist') |             Person.objects.only("settings.notexist") | ||||||
|  |  | ||||||
|         # Test existing attribute |         # Test existing attribute | ||||||
|         self.assertEqual(Person.objects(settings__foo1='bar1').first().id, p1.id) |         assert Person.objects(settings__foo1="bar1").first().id == p1.id | ||||||
|         self.assertEqual(Person.objects(settings__foo2='bar2').first().id, p2.id) |         assert Person.objects(settings__foo2="bar2").first().id == p2.id | ||||||
|  |  | ||||||
|     def test_query_generic_embedded_document_attribute_with_inheritance(self): |     def test_query_generic_embedded_document_attribute_with_inheritance(self): | ||||||
|         class BaseSettings(EmbeddedDocument): |         class BaseSettings(EmbeddedDocument): | ||||||
|             meta = {'allow_inheritance': True} |             meta = {"allow_inheritance": True} | ||||||
|             base_foo = StringField() |             base_foo = StringField() | ||||||
|  |  | ||||||
|         class AdminSettings(BaseSettings): |         class AdminSettings(BaseSettings): | ||||||
| @@ -331,14 +341,14 @@ class TestGenericEmbeddedDocumentField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         p = Person(settings=AdminSettings(base_foo='basefoo', sub_foo='subfoo')) |         p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo")) | ||||||
|         p.save() |         p.save() | ||||||
|  |  | ||||||
|         # Test non exiting attribute |         # Test non exiting attribute | ||||||
|         with self.assertRaises(InvalidQueryError) as ctx_err: |         with pytest.raises(InvalidQueryError) as exc_info: | ||||||
|             self.assertEqual(Person.objects(settings__notexist='bar').first().id, p.id) |             assert Person.objects(settings__notexist="bar").first().id == p.id | ||||||
|         self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') |         assert str(exc_info.value) == u'Cannot resolve field "notexist"' | ||||||
|  |  | ||||||
|         # Test existing attribute |         # Test existing attribute | ||||||
|         self.assertEqual(Person.objects(settings__base_foo='basefoo').first().id, p.id) |         assert Person.objects(settings__base_foo="basefoo").first().id == p.id | ||||||
|         self.assertEqual(Person.objects(settings__sub_foo='subfoo').first().id, p.id) |         assert Person.objects(settings__sub_foo="subfoo").first().id == p.id | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,49 +1,51 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| import copy | import copy | ||||||
| import os | import os | ||||||
| import unittest |  | ||||||
| import tempfile | import tempfile | ||||||
|  | import unittest | ||||||
|  | from io import BytesIO | ||||||
| 
 | 
 | ||||||
| import gridfs | import gridfs | ||||||
| import six | import pytest | ||||||
| 
 | 
 | ||||||
| from nose.plugins.skip import SkipTest |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.connection import get_db | from mongoengine.connection import get_db | ||||||
| from mongoengine.python_support import StringIO |  | ||||||
| 
 | 
 | ||||||
| try: | try: | ||||||
|     from PIL import Image |     from PIL import Image | ||||||
|  | 
 | ||||||
|     HAS_PIL = True |     HAS_PIL = True | ||||||
| except ImportError: | except ImportError: | ||||||
|     HAS_PIL = False |     HAS_PIL = False | ||||||
| 
 | 
 | ||||||
| from tests.utils import MongoDBTestCase | from tests.utils import MongoDBTestCase | ||||||
| 
 | 
 | ||||||
| TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') | require_pil = pytest.mark.skipif(not HAS_PIL, reason="PIL not installed") | ||||||
| TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') | 
 | ||||||
|  | TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "mongoengine.png") | ||||||
|  | TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png") | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def get_file(path): | def get_file(path): | ||||||
|     """Use a BytesIO instead of a file to allow |     """Use a BytesIO instead of a file to allow | ||||||
|     to have a one-liner and avoid that the file remains opened""" |     to have a one-liner and avoid that the file remains opened""" | ||||||
|     bytes_io = StringIO() |     bytes_io = BytesIO() | ||||||
|     with open(path, 'rb') as f: |     with open(path, "rb") as f: | ||||||
|         bytes_io.write(f.read()) |         bytes_io.write(f.read()) | ||||||
|     bytes_io.seek(0) |     bytes_io.seek(0) | ||||||
|     return bytes_io |     return bytes_io | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class FileTest(MongoDBTestCase): | class TestFileField(MongoDBTestCase): | ||||||
| 
 |  | ||||||
|     def tearDown(self): |     def tearDown(self): | ||||||
|         self.db.drop_collection('fs.files') |         self.db.drop_collection("fs.files") | ||||||
|         self.db.drop_collection('fs.chunks') |         self.db.drop_collection("fs.chunks") | ||||||
| 
 | 
 | ||||||
|     def test_file_field_optional(self): |     def test_file_field_optional(self): | ||||||
|         # Make sure FileField is optional and not required |         # Make sure FileField is optional and not required | ||||||
|         class DemoFile(Document): |         class DemoFile(Document): | ||||||
|             the_file = FileField() |             the_file = FileField() | ||||||
|  | 
 | ||||||
|         DemoFile.objects.create() |         DemoFile.objects.create() | ||||||
| 
 | 
 | ||||||
|     def test_file_fields(self): |     def test_file_fields(self): | ||||||
| @@ -55,18 +57,21 @@ class FileTest(MongoDBTestCase): | |||||||
| 
 | 
 | ||||||
|         PutFile.drop_collection() |         PutFile.drop_collection() | ||||||
| 
 | 
 | ||||||
|         text = six.b('Hello, World!') |         text = "Hello, World!".encode("latin-1") | ||||||
|         content_type = 'text/plain' |         content_type = "text/plain" | ||||||
| 
 | 
 | ||||||
|         putfile = PutFile() |         putfile = PutFile() | ||||||
|         putfile.the_file.put(text, content_type=content_type, filename="hello") |         putfile.the_file.put(text, content_type=content_type, filename="hello") | ||||||
|         putfile.save() |         putfile.save() | ||||||
| 
 | 
 | ||||||
|         result = PutFile.objects.first() |         result = PutFile.objects.first() | ||||||
|         self.assertEqual(putfile, result) |         assert putfile == result | ||||||
|         self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello (%s)>" % result.the_file.grid_id) |         assert ( | ||||||
|         self.assertEqual(result.the_file.read(), text) |             "%s" % result.the_file | ||||||
|         self.assertEqual(result.the_file.content_type, content_type) |             == "<GridFSProxy: hello (%s)>" % result.the_file.grid_id | ||||||
|  |         ) | ||||||
|  |         assert result.the_file.read() == text | ||||||
|  |         assert result.the_file.content_type == content_type | ||||||
|         result.the_file.delete()  # Remove file from GridFS |         result.the_file.delete()  # Remove file from GridFS | ||||||
|         PutFile.objects.delete() |         PutFile.objects.delete() | ||||||
| 
 | 
 | ||||||
| @@ -74,29 +79,30 @@ class FileTest(MongoDBTestCase): | |||||||
|         PutFile.drop_collection() |         PutFile.drop_collection() | ||||||
| 
 | 
 | ||||||
|         putfile = PutFile() |         putfile = PutFile() | ||||||
|         putstring = StringIO() |         putstring = BytesIO() | ||||||
|         putstring.write(text) |         putstring.write(text) | ||||||
|         putstring.seek(0) |         putstring.seek(0) | ||||||
|         putfile.the_file.put(putstring, content_type=content_type) |         putfile.the_file.put(putstring, content_type=content_type) | ||||||
|         putfile.save() |         putfile.save() | ||||||
| 
 | 
 | ||||||
|         result = PutFile.objects.first() |         result = PutFile.objects.first() | ||||||
|         self.assertEqual(putfile, result) |         assert putfile == result | ||||||
|         self.assertEqual(result.the_file.read(), text) |         assert result.the_file.read() == text | ||||||
|         self.assertEqual(result.the_file.content_type, content_type) |         assert result.the_file.content_type == content_type | ||||||
|         result.the_file.delete() |         result.the_file.delete() | ||||||
| 
 | 
 | ||||||
|     def test_file_fields_stream(self): |     def test_file_fields_stream(self): | ||||||
|         """Ensure that file fields can be written to and their data retrieved |         """Ensure that file fields can be written to and their data retrieved | ||||||
|         """ |         """ | ||||||
|  | 
 | ||||||
|         class StreamFile(Document): |         class StreamFile(Document): | ||||||
|             the_file = FileField() |             the_file = FileField() | ||||||
| 
 | 
 | ||||||
|         StreamFile.drop_collection() |         StreamFile.drop_collection() | ||||||
| 
 | 
 | ||||||
|         text = six.b('Hello, World!') |         text = "Hello, World!".encode("latin-1") | ||||||
|         more_text = six.b('Foo Bar') |         more_text = "Foo Bar".encode("latin-1") | ||||||
|         content_type = 'text/plain' |         content_type = "text/plain" | ||||||
| 
 | 
 | ||||||
|         streamfile = StreamFile() |         streamfile = StreamFile() | ||||||
|         streamfile.the_file.new_file(content_type=content_type) |         streamfile.the_file.new_file(content_type=content_type) | ||||||
| @@ -106,32 +112,32 @@ class FileTest(MongoDBTestCase): | |||||||
|         streamfile.save() |         streamfile.save() | ||||||
| 
 | 
 | ||||||
|         result = StreamFile.objects.first() |         result = StreamFile.objects.first() | ||||||
|         self.assertEqual(streamfile, result) |         assert streamfile == result | ||||||
|         self.assertEqual(result.the_file.read(), text + more_text) |         assert result.the_file.read() == text + more_text | ||||||
|         self.assertEqual(result.the_file.content_type, content_type) |         assert result.the_file.content_type == content_type | ||||||
|         result.the_file.seek(0) |         result.the_file.seek(0) | ||||||
|         self.assertEqual(result.the_file.tell(), 0) |         assert result.the_file.tell() == 0 | ||||||
|         self.assertEqual(result.the_file.read(len(text)), text) |         assert result.the_file.read(len(text)) == text | ||||||
|         self.assertEqual(result.the_file.tell(), len(text)) |         assert result.the_file.tell() == len(text) | ||||||
|         self.assertEqual(result.the_file.read(len(more_text)), more_text) |         assert result.the_file.read(len(more_text)) == more_text | ||||||
|         self.assertEqual(result.the_file.tell(), len(text + more_text)) |         assert result.the_file.tell() == len(text + more_text) | ||||||
|         result.the_file.delete() |         result.the_file.delete() | ||||||
| 
 | 
 | ||||||
|         # Ensure deleted file returns None |         # Ensure deleted file returns None | ||||||
|         self.assertTrue(result.the_file.read() is None) |         assert result.the_file.read() is None | ||||||
| 
 | 
 | ||||||
|     def test_file_fields_stream_after_none(self): |     def test_file_fields_stream_after_none(self): | ||||||
|         """Ensure that a file field can be written to after it has been saved as |         """Ensure that a file field can be written to after it has been saved as | ||||||
|         None |         None | ||||||
|         """ |         """ | ||||||
|  | 
 | ||||||
|         class StreamFile(Document): |         class StreamFile(Document): | ||||||
|             the_file = FileField() |             the_file = FileField() | ||||||
| 
 | 
 | ||||||
|         StreamFile.drop_collection() |         StreamFile.drop_collection() | ||||||
| 
 | 
 | ||||||
|         text = six.b('Hello, World!') |         text = "Hello, World!".encode("latin-1") | ||||||
|         more_text = six.b('Foo Bar') |         more_text = "Foo Bar".encode("latin-1") | ||||||
|         content_type = 'text/plain' |  | ||||||
| 
 | 
 | ||||||
|         streamfile = StreamFile() |         streamfile = StreamFile() | ||||||
|         streamfile.save() |         streamfile.save() | ||||||
| @@ -142,27 +148,26 @@ class FileTest(MongoDBTestCase): | |||||||
|         streamfile.save() |         streamfile.save() | ||||||
| 
 | 
 | ||||||
|         result = StreamFile.objects.first() |         result = StreamFile.objects.first() | ||||||
|         self.assertEqual(streamfile, result) |         assert streamfile == result | ||||||
|         self.assertEqual(result.the_file.read(), text + more_text) |         assert result.the_file.read() == text + more_text | ||||||
|         # self.assertEqual(result.the_file.content_type, content_type) |         # assert result.the_file.content_type == content_type | ||||||
|         result.the_file.seek(0) |         result.the_file.seek(0) | ||||||
|         self.assertEqual(result.the_file.tell(), 0) |         assert result.the_file.tell() == 0 | ||||||
|         self.assertEqual(result.the_file.read(len(text)), text) |         assert result.the_file.read(len(text)) == text | ||||||
|         self.assertEqual(result.the_file.tell(), len(text)) |         assert result.the_file.tell() == len(text) | ||||||
|         self.assertEqual(result.the_file.read(len(more_text)), more_text) |         assert result.the_file.read(len(more_text)) == more_text | ||||||
|         self.assertEqual(result.the_file.tell(), len(text + more_text)) |         assert result.the_file.tell() == len(text + more_text) | ||||||
|         result.the_file.delete() |         result.the_file.delete() | ||||||
| 
 | 
 | ||||||
|         # Ensure deleted file returns None |         # Ensure deleted file returns None | ||||||
|         self.assertTrue(result.the_file.read() is None) |         assert result.the_file.read() is None | ||||||
| 
 | 
 | ||||||
|     def test_file_fields_set(self): |     def test_file_fields_set(self): | ||||||
| 
 |  | ||||||
|         class SetFile(Document): |         class SetFile(Document): | ||||||
|             the_file = FileField() |             the_file = FileField() | ||||||
| 
 | 
 | ||||||
|         text = six.b('Hello, World!') |         text = "Hello, World!".encode("latin-1") | ||||||
|         more_text = six.b('Foo Bar') |         more_text = "Foo Bar".encode("latin-1") | ||||||
| 
 | 
 | ||||||
|         SetFile.drop_collection() |         SetFile.drop_collection() | ||||||
| 
 | 
 | ||||||
| @@ -171,27 +176,26 @@ class FileTest(MongoDBTestCase): | |||||||
|         setfile.save() |         setfile.save() | ||||||
| 
 | 
 | ||||||
|         result = SetFile.objects.first() |         result = SetFile.objects.first() | ||||||
|         self.assertEqual(setfile, result) |         assert setfile == result | ||||||
|         self.assertEqual(result.the_file.read(), text) |         assert result.the_file.read() == text | ||||||
| 
 | 
 | ||||||
|         # Try replacing file with new one |         # Try replacing file with new one | ||||||
|         result.the_file.replace(more_text) |         result.the_file.replace(more_text) | ||||||
|         result.save() |         result.save() | ||||||
| 
 | 
 | ||||||
|         result = SetFile.objects.first() |         result = SetFile.objects.first() | ||||||
|         self.assertEqual(setfile, result) |         assert setfile == result | ||||||
|         self.assertEqual(result.the_file.read(), more_text) |         assert result.the_file.read() == more_text | ||||||
|         result.the_file.delete() |         result.the_file.delete() | ||||||
| 
 | 
 | ||||||
|     def test_file_field_no_default(self): |     def test_file_field_no_default(self): | ||||||
| 
 |  | ||||||
|         class GridDocument(Document): |         class GridDocument(Document): | ||||||
|             the_file = FileField() |             the_file = FileField() | ||||||
| 
 | 
 | ||||||
|         GridDocument.drop_collection() |         GridDocument.drop_collection() | ||||||
| 
 | 
 | ||||||
|         with tempfile.TemporaryFile() as f: |         with tempfile.TemporaryFile() as f: | ||||||
|             f.write(six.b("Hello World!")) |             f.write("Hello World!".encode("latin-1")) | ||||||
|             f.flush() |             f.flush() | ||||||
| 
 | 
 | ||||||
|             # Test without default |             # Test without default | ||||||
| @@ -199,34 +203,35 @@ class FileTest(MongoDBTestCase): | |||||||
|             doc_a.save() |             doc_a.save() | ||||||
| 
 | 
 | ||||||
|             doc_b = GridDocument.objects.with_id(doc_a.id) |             doc_b = GridDocument.objects.with_id(doc_a.id) | ||||||
|             doc_b.the_file.replace(f, filename='doc_b') |             doc_b.the_file.replace(f, filename="doc_b") | ||||||
|             doc_b.save() |             doc_b.save() | ||||||
|             self.assertNotEqual(doc_b.the_file.grid_id, None) |             assert doc_b.the_file.grid_id is not None | ||||||
| 
 | 
 | ||||||
|             # Test it matches |             # Test it matches | ||||||
|             doc_c = GridDocument.objects.with_id(doc_b.id) |             doc_c = GridDocument.objects.with_id(doc_b.id) | ||||||
|             self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id) |             assert doc_b.the_file.grid_id == doc_c.the_file.grid_id | ||||||
| 
 | 
 | ||||||
|             # Test with default |             # Test with default | ||||||
|             doc_d = GridDocument(the_file=six.b('')) |             doc_d = GridDocument(the_file="".encode("latin-1")) | ||||||
|             doc_d.save() |             doc_d.save() | ||||||
| 
 | 
 | ||||||
|             doc_e = GridDocument.objects.with_id(doc_d.id) |             doc_e = GridDocument.objects.with_id(doc_d.id) | ||||||
|             self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id) |             assert doc_d.the_file.grid_id == doc_e.the_file.grid_id | ||||||
| 
 | 
 | ||||||
|             doc_e.the_file.replace(f, filename='doc_e') |             doc_e.the_file.replace(f, filename="doc_e") | ||||||
|             doc_e.save() |             doc_e.save() | ||||||
| 
 | 
 | ||||||
|             doc_f = GridDocument.objects.with_id(doc_e.id) |             doc_f = GridDocument.objects.with_id(doc_e.id) | ||||||
|             self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id) |             assert doc_e.the_file.grid_id == doc_f.the_file.grid_id | ||||||
| 
 | 
 | ||||||
|         db = GridDocument._get_db() |         db = GridDocument._get_db() | ||||||
|         grid_fs = gridfs.GridFS(db) |         grid_fs = gridfs.GridFS(db) | ||||||
|         self.assertEqual(['doc_b', 'doc_e'], grid_fs.list()) |         assert ["doc_b", "doc_e"] == grid_fs.list() | ||||||
| 
 | 
 | ||||||
|     def test_file_uniqueness(self): |     def test_file_uniqueness(self): | ||||||
|         """Ensure that each instance of a FileField is unique |         """Ensure that each instance of a FileField is unique | ||||||
|         """ |         """ | ||||||
|  | 
 | ||||||
|         class TestFile(Document): |         class TestFile(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             the_file = FileField() |             the_file = FileField() | ||||||
| @@ -234,15 +239,15 @@ class FileTest(MongoDBTestCase): | |||||||
|         # First instance |         # First instance | ||||||
|         test_file = TestFile() |         test_file = TestFile() | ||||||
|         test_file.name = "Hello, World!" |         test_file.name = "Hello, World!" | ||||||
|         test_file.the_file.put(six.b('Hello, World!')) |         test_file.the_file.put("Hello, World!".encode("latin-1")) | ||||||
|         test_file.save() |         test_file.save() | ||||||
| 
 | 
 | ||||||
|         # Second instance |         # Second instance | ||||||
|         test_file_dupe = TestFile() |         test_file_dupe = TestFile() | ||||||
|         data = test_file_dupe.the_file.read()  # Should be None |         data = test_file_dupe.the_file.read()  # Should be None | ||||||
| 
 | 
 | ||||||
|         self.assertNotEqual(test_file.name, test_file_dupe.name) |         assert test_file.name != test_file_dupe.name | ||||||
|         self.assertNotEqual(test_file.the_file.read(), data) |         assert test_file.the_file.read() != data | ||||||
| 
 | 
 | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
| 
 | 
 | ||||||
| @@ -255,61 +260,68 @@ class FileTest(MongoDBTestCase): | |||||||
|             photo = FileField() |             photo = FileField() | ||||||
| 
 | 
 | ||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         marmot = Animal(genus='Marmota', family='Sciuridae') |         marmot = Animal(genus="Marmota", family="Sciuridae") | ||||||
| 
 | 
 | ||||||
|         marmot_photo_content = get_file(TEST_IMAGE_PATH)  # Retrieve a photo from disk |         marmot_photo_content = get_file(TEST_IMAGE_PATH)  # Retrieve a photo from disk | ||||||
|         marmot.photo.put(marmot_photo_content, content_type='image/jpeg', foo='bar') |         marmot.photo.put(marmot_photo_content, content_type="image/jpeg", foo="bar") | ||||||
|         marmot.photo.close() |         marmot.photo.close() | ||||||
|         marmot.save() |         marmot.save() | ||||||
| 
 | 
 | ||||||
|         marmot = Animal.objects.get() |         marmot = Animal.objects.get() | ||||||
|         self.assertEqual(marmot.photo.content_type, 'image/jpeg') |         assert marmot.photo.content_type == "image/jpeg" | ||||||
|         self.assertEqual(marmot.photo.foo, 'bar') |         assert marmot.photo.foo == "bar" | ||||||
| 
 | 
 | ||||||
|     def test_file_reassigning(self): |     def test_file_reassigning(self): | ||||||
|         class TestFile(Document): |         class TestFile(Document): | ||||||
|             the_file = FileField() |             the_file = FileField() | ||||||
|  | 
 | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
| 
 | 
 | ||||||
|         test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() |         test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() | ||||||
|         self.assertEqual(test_file.the_file.get().length, 8313) |         assert test_file.the_file.get().length == 8313 | ||||||
| 
 | 
 | ||||||
|         test_file = TestFile.objects.first() |         test_file = TestFile.objects.first() | ||||||
|         test_file.the_file = get_file(TEST_IMAGE2_PATH) |         test_file.the_file = get_file(TEST_IMAGE2_PATH) | ||||||
|         test_file.save() |         test_file.save() | ||||||
|         self.assertEqual(test_file.the_file.get().length, 4971) |         assert test_file.the_file.get().length == 4971 | ||||||
| 
 | 
 | ||||||
|     def test_file_boolean(self): |     def test_file_boolean(self): | ||||||
|         """Ensure that a boolean test of a FileField indicates its presence |         """Ensure that a boolean test of a FileField indicates its presence | ||||||
|         """ |         """ | ||||||
|  | 
 | ||||||
|         class TestFile(Document): |         class TestFile(Document): | ||||||
|             the_file = FileField() |             the_file = FileField() | ||||||
|  | 
 | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
| 
 | 
 | ||||||
|         test_file = TestFile() |         test_file = TestFile() | ||||||
|         self.assertFalse(bool(test_file.the_file)) |         assert not bool(test_file.the_file) | ||||||
|         test_file.the_file.put(six.b('Hello, World!'), content_type='text/plain') |         test_file.the_file.put( | ||||||
|  |             "Hello, World!".encode("latin-1"), content_type="text/plain" | ||||||
|  |         ) | ||||||
|         test_file.save() |         test_file.save() | ||||||
|         self.assertTrue(bool(test_file.the_file)) |         assert bool(test_file.the_file) | ||||||
| 
 | 
 | ||||||
|         test_file = TestFile.objects.first() |         test_file = TestFile.objects.first() | ||||||
|         self.assertEqual(test_file.the_file.content_type, "text/plain") |         assert test_file.the_file.content_type == "text/plain" | ||||||
| 
 | 
 | ||||||
|     def test_file_cmp(self): |     def test_file_cmp(self): | ||||||
|         """Test comparing against other types""" |         """Test comparing against other types""" | ||||||
|  | 
 | ||||||
|         class TestFile(Document): |         class TestFile(Document): | ||||||
|             the_file = FileField() |             the_file = FileField() | ||||||
| 
 | 
 | ||||||
|         test_file = TestFile() |         test_file = TestFile() | ||||||
|         self.assertNotIn(test_file.the_file, [{"test": 1}]) |         assert test_file.the_file not in [{"test": 1}] | ||||||
| 
 | 
 | ||||||
|     def test_file_disk_space(self): |     def test_file_disk_space(self): | ||||||
|         """ Test disk space usage when we delete/replace a file """ |         """ Test disk space usage when we delete/replace a file """ | ||||||
|  | 
 | ||||||
|         class TestFile(Document): |         class TestFile(Document): | ||||||
|             the_file = FileField() |             the_file = FileField() | ||||||
| 
 | 
 | ||||||
|         text = six.b('Hello, World!') |         text = "Hello, World!".encode("latin-1") | ||||||
|         content_type = 'text/plain' |         content_type = "text/plain" | ||||||
| 
 | 
 | ||||||
|         testfile = TestFile() |         testfile = TestFile() | ||||||
|         testfile.the_file.put(text, content_type=content_type, filename="hello") |         testfile.the_file.put(text, content_type=content_type, filename="hello") | ||||||
| @@ -320,16 +332,16 @@ class FileTest(MongoDBTestCase): | |||||||
| 
 | 
 | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEqual(len(list(files)), 1) |         assert len(list(files)) == 1 | ||||||
|         self.assertEqual(len(list(chunks)), 1) |         assert len(list(chunks)) == 1 | ||||||
| 
 | 
 | ||||||
|         # Deleting the docoument should delete the files |         # Deleting the docoument should delete the files | ||||||
|         testfile.delete() |         testfile.delete() | ||||||
| 
 | 
 | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEqual(len(list(files)), 0) |         assert len(list(files)) == 0 | ||||||
|         self.assertEqual(len(list(chunks)), 0) |         assert len(list(chunks)) == 0 | ||||||
| 
 | 
 | ||||||
|         # Test case where we don't store a file in the first place |         # Test case where we don't store a file in the first place | ||||||
|         testfile = TestFile() |         testfile = TestFile() | ||||||
| @@ -337,48 +349,46 @@ class FileTest(MongoDBTestCase): | |||||||
| 
 | 
 | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEqual(len(list(files)), 0) |         assert len(list(files)) == 0 | ||||||
|         self.assertEqual(len(list(chunks)), 0) |         assert len(list(chunks)) == 0 | ||||||
| 
 | 
 | ||||||
|         testfile.delete() |         testfile.delete() | ||||||
| 
 | 
 | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEqual(len(list(files)), 0) |         assert len(list(files)) == 0 | ||||||
|         self.assertEqual(len(list(chunks)), 0) |         assert len(list(chunks)) == 0 | ||||||
| 
 | 
 | ||||||
|         # Test case where we overwrite the file |         # Test case where we overwrite the file | ||||||
|         testfile = TestFile() |         testfile = TestFile() | ||||||
|         testfile.the_file.put(text, content_type=content_type, filename="hello") |         testfile.the_file.put(text, content_type=content_type, filename="hello") | ||||||
|         testfile.save() |         testfile.save() | ||||||
| 
 | 
 | ||||||
|         text = six.b('Bonjour, World!') |         text = "Bonjour, World!".encode("latin-1") | ||||||
|         testfile.the_file.replace(text, content_type=content_type, filename="hello") |         testfile.the_file.replace(text, content_type=content_type, filename="hello") | ||||||
|         testfile.save() |         testfile.save() | ||||||
| 
 | 
 | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEqual(len(list(files)), 1) |         assert len(list(files)) == 1 | ||||||
|         self.assertEqual(len(list(chunks)), 1) |         assert len(list(chunks)) == 1 | ||||||
| 
 | 
 | ||||||
|         testfile.delete() |         testfile.delete() | ||||||
| 
 | 
 | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEqual(len(list(files)), 0) |         assert len(list(files)) == 0 | ||||||
|         self.assertEqual(len(list(chunks)), 0) |         assert len(list(chunks)) == 0 | ||||||
| 
 | 
 | ||||||
|  |     @require_pil | ||||||
|     def test_image_field(self): |     def test_image_field(self): | ||||||
|         if not HAS_PIL: |  | ||||||
|             raise SkipTest('PIL not installed') |  | ||||||
| 
 |  | ||||||
|         class TestImage(Document): |         class TestImage(Document): | ||||||
|             image = ImageField() |             image = ImageField() | ||||||
| 
 | 
 | ||||||
|         TestImage.drop_collection() |         TestImage.drop_collection() | ||||||
| 
 | 
 | ||||||
|         with tempfile.TemporaryFile() as f: |         with tempfile.TemporaryFile() as f: | ||||||
|             f.write(six.b("Hello World!")) |             f.write("Hello World!".encode("latin-1")) | ||||||
|             f.flush() |             f.flush() | ||||||
| 
 | 
 | ||||||
|             t = TestImage() |             t = TestImage() | ||||||
| @@ -386,7 +396,7 @@ class FileTest(MongoDBTestCase): | |||||||
|                 t.image.put(f) |                 t.image.put(f) | ||||||
|                 self.fail("Should have raised an invalidation error") |                 self.fail("Should have raised an invalidation error") | ||||||
|             except ValidationError as e: |             except ValidationError as e: | ||||||
|                 self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f) |                 assert "%s" % e == "Invalid image: cannot identify image file %s" % f | ||||||
| 
 | 
 | ||||||
|         t = TestImage() |         t = TestImage() | ||||||
|         t.image.put(get_file(TEST_IMAGE_PATH)) |         t.image.put(get_file(TEST_IMAGE_PATH)) | ||||||
| @@ -394,34 +404,31 @@ class FileTest(MongoDBTestCase): | |||||||
| 
 | 
 | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(t.image.format, 'PNG') |         assert t.image.format == "PNG" | ||||||
| 
 | 
 | ||||||
|         w, h = t.image.size |         w, h = t.image.size | ||||||
|         self.assertEqual(w, 371) |         assert w == 371 | ||||||
|         self.assertEqual(h, 76) |         assert h == 76 | ||||||
| 
 | 
 | ||||||
|         t.image.delete() |         t.image.delete() | ||||||
| 
 | 
 | ||||||
|  |     @require_pil | ||||||
|     def test_image_field_reassigning(self): |     def test_image_field_reassigning(self): | ||||||
|         if not HAS_PIL: |  | ||||||
|             raise SkipTest('PIL not installed') |  | ||||||
| 
 |  | ||||||
|         class TestFile(Document): |         class TestFile(Document): | ||||||
|             the_file = ImageField() |             the_file = ImageField() | ||||||
|  | 
 | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
| 
 | 
 | ||||||
|         test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() |         test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() | ||||||
|         self.assertEqual(test_file.the_file.size, (371, 76)) |         assert test_file.the_file.size == (371, 76) | ||||||
| 
 | 
 | ||||||
|         test_file = TestFile.objects.first() |         test_file = TestFile.objects.first() | ||||||
|         test_file.the_file = get_file(TEST_IMAGE2_PATH) |         test_file.the_file = get_file(TEST_IMAGE2_PATH) | ||||||
|         test_file.save() |         test_file.save() | ||||||
|         self.assertEqual(test_file.the_file.size, (45, 101)) |         assert test_file.the_file.size == (45, 101) | ||||||
| 
 | 
 | ||||||
|  |     @require_pil | ||||||
|     def test_image_field_resize(self): |     def test_image_field_resize(self): | ||||||
|         if not HAS_PIL: |  | ||||||
|             raise SkipTest('PIL not installed') |  | ||||||
| 
 |  | ||||||
|         class TestImage(Document): |         class TestImage(Document): | ||||||
|             image = ImageField(size=(185, 37)) |             image = ImageField(size=(185, 37)) | ||||||
| 
 | 
 | ||||||
| @@ -433,18 +440,16 @@ class FileTest(MongoDBTestCase): | |||||||
| 
 | 
 | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(t.image.format, 'PNG') |         assert t.image.format == "PNG" | ||||||
|         w, h = t.image.size |         w, h = t.image.size | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(w, 185) |         assert w == 185 | ||||||
|         self.assertEqual(h, 37) |         assert h == 37 | ||||||
| 
 | 
 | ||||||
|         t.image.delete() |         t.image.delete() | ||||||
| 
 | 
 | ||||||
|  |     @require_pil | ||||||
|     def test_image_field_resize_force(self): |     def test_image_field_resize_force(self): | ||||||
|         if not HAS_PIL: |  | ||||||
|             raise SkipTest('PIL not installed') |  | ||||||
| 
 |  | ||||||
|         class TestImage(Document): |         class TestImage(Document): | ||||||
|             image = ImageField(size=(185, 37, True)) |             image = ImageField(size=(185, 37, True)) | ||||||
| 
 | 
 | ||||||
| @@ -456,18 +461,16 @@ class FileTest(MongoDBTestCase): | |||||||
| 
 | 
 | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(t.image.format, 'PNG') |         assert t.image.format == "PNG" | ||||||
|         w, h = t.image.size |         w, h = t.image.size | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(w, 185) |         assert w == 185 | ||||||
|         self.assertEqual(h, 37) |         assert h == 37 | ||||||
| 
 | 
 | ||||||
|         t.image.delete() |         t.image.delete() | ||||||
| 
 | 
 | ||||||
|  |     @require_pil | ||||||
|     def test_image_field_thumbnail(self): |     def test_image_field_thumbnail(self): | ||||||
|         if not HAS_PIL: |  | ||||||
|             raise SkipTest('PIL not installed') |  | ||||||
| 
 |  | ||||||
|         class TestImage(Document): |         class TestImage(Document): | ||||||
|             image = ImageField(thumbnail_size=(92, 18)) |             image = ImageField(thumbnail_size=(92, 18)) | ||||||
| 
 | 
 | ||||||
| @@ -479,19 +482,18 @@ class FileTest(MongoDBTestCase): | |||||||
| 
 | 
 | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(t.image.thumbnail.format, 'PNG') |         assert t.image.thumbnail.format == "PNG" | ||||||
|         self.assertEqual(t.image.thumbnail.width, 92) |         assert t.image.thumbnail.width == 92 | ||||||
|         self.assertEqual(t.image.thumbnail.height, 18) |         assert t.image.thumbnail.height == 18 | ||||||
| 
 | 
 | ||||||
|         t.image.delete() |         t.image.delete() | ||||||
| 
 | 
 | ||||||
|     def test_file_multidb(self): |     def test_file_multidb(self): | ||||||
|         register_connection('test_files', 'test_files') |         register_connection("test_files", "test_files") | ||||||
| 
 | 
 | ||||||
|         class TestFile(Document): |         class TestFile(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             the_file = FileField(db_alias="test_files", |             the_file = FileField(db_alias="test_files", collection_name="macumba") | ||||||
|                                  collection_name="macumba") |  | ||||||
| 
 | 
 | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
| 
 | 
 | ||||||
| @@ -502,23 +504,21 @@ class FileTest(MongoDBTestCase): | |||||||
|         # First instance |         # First instance | ||||||
|         test_file = TestFile() |         test_file = TestFile() | ||||||
|         test_file.name = "Hello, World!" |         test_file.name = "Hello, World!" | ||||||
|         test_file.the_file.put(six.b('Hello, World!'), |         test_file.the_file.put("Hello, World!".encode("latin-1"), name="hello.txt") | ||||||
|                           name="hello.txt") |  | ||||||
|         test_file.save() |         test_file.save() | ||||||
| 
 | 
 | ||||||
|         data = get_db("test_files").macumba.files.find_one() |         data = get_db("test_files").macumba.files.find_one() | ||||||
|         self.assertEqual(data.get('name'), 'hello.txt') |         assert data.get("name") == "hello.txt" | ||||||
| 
 | 
 | ||||||
|         test_file = TestFile.objects.first() |         test_file = TestFile.objects.first() | ||||||
|         self.assertEqual(test_file.the_file.read(), six.b('Hello, World!')) |         assert test_file.the_file.read() == "Hello, World!".encode("latin-1") | ||||||
| 
 | 
 | ||||||
|         test_file = TestFile.objects.first() |         test_file = TestFile.objects.first() | ||||||
|         test_file.the_file = six.b('HELLO, WORLD!') |         test_file.the_file = "Hello, World!".encode("latin-1") | ||||||
|         test_file.save() |         test_file.save() | ||||||
| 
 | 
 | ||||||
|         test_file = TestFile.objects.first() |         test_file = TestFile.objects.first() | ||||||
|         self.assertEqual(test_file.the_file.read(), |         assert test_file.the_file.read() == "Hello, World!".encode("latin-1") | ||||||
|                          six.b('HELLO, WORLD!')) |  | ||||||
| 
 | 
 | ||||||
|     def test_copyable(self): |     def test_copyable(self): | ||||||
|         class PutFile(Document): |         class PutFile(Document): | ||||||
| @@ -526,8 +526,8 @@ class FileTest(MongoDBTestCase): | |||||||
| 
 | 
 | ||||||
|         PutFile.drop_collection() |         PutFile.drop_collection() | ||||||
| 
 | 
 | ||||||
|         text = six.b('Hello, World!') |         text = "Hello, World!".encode("latin-1") | ||||||
|         content_type = 'text/plain' |         content_type = "text/plain" | ||||||
| 
 | 
 | ||||||
|         putfile = PutFile() |         putfile = PutFile() | ||||||
|         putfile.the_file.put(text, content_type=content_type) |         putfile.the_file.put(text, content_type=content_type) | ||||||
| @@ -536,14 +536,11 @@ class FileTest(MongoDBTestCase): | |||||||
|         class TestFile(Document): |         class TestFile(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(putfile, copy.copy(putfile)) |         assert putfile == copy.copy(putfile) | ||||||
|         self.assertEqual(putfile, copy.deepcopy(putfile)) |         assert putfile == copy.deepcopy(putfile) | ||||||
| 
 | 
 | ||||||
|  |     @require_pil | ||||||
|     def test_get_image_by_grid_id(self): |     def test_get_image_by_grid_id(self): | ||||||
| 
 |  | ||||||
|         if not HAS_PIL: |  | ||||||
|             raise SkipTest('PIL not installed') |  | ||||||
| 
 |  | ||||||
|         class TestImage(Document): |         class TestImage(Document): | ||||||
| 
 | 
 | ||||||
|             image1 = ImageField() |             image1 = ImageField() | ||||||
| @@ -559,8 +556,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         test = TestImage.objects.first() |         test = TestImage.objects.first() | ||||||
|         grid_id = test.image1.grid_id |         grid_id = test.image1.grid_id | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(1, TestImage.objects(Q(image1=grid_id) |         assert 1 == TestImage.objects(Q(image1=grid_id) or Q(image2=grid_id)).count() | ||||||
|                                               or Q(image2=grid_id)).count()) |  | ||||||
| 
 | 
 | ||||||
|     def test_complex_field_filefield(self): |     def test_complex_field_filefield(self): | ||||||
|         """Ensure you can add meta data to file""" |         """Ensure you can add meta data to file""" | ||||||
| @@ -571,21 +567,21 @@ class FileTest(MongoDBTestCase): | |||||||
|             photos = ListField(FileField()) |             photos = ListField(FileField()) | ||||||
| 
 | 
 | ||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         marmot = Animal(genus='Marmota', family='Sciuridae') |         marmot = Animal(genus="Marmota", family="Sciuridae") | ||||||
| 
 | 
 | ||||||
|         with open(TEST_IMAGE_PATH, 'rb') as marmot_photo:   # Retrieve a photo from disk |         with open(TEST_IMAGE_PATH, "rb") as marmot_photo:  # Retrieve a photo from disk | ||||||
|             photos_field = marmot._fields['photos'].field |             photos_field = marmot._fields["photos"].field | ||||||
|             new_proxy = photos_field.get_proxy_obj('photos', marmot) |             new_proxy = photos_field.get_proxy_obj("photos", marmot) | ||||||
|             new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar') |             new_proxy.put(marmot_photo, content_type="image/jpeg", foo="bar") | ||||||
| 
 | 
 | ||||||
|         marmot.photos.append(new_proxy) |         marmot.photos.append(new_proxy) | ||||||
|         marmot.save() |         marmot.save() | ||||||
| 
 | 
 | ||||||
|         marmot = Animal.objects.get() |         marmot = Animal.objects.get() | ||||||
|         self.assertEqual(marmot.photos[0].content_type, 'image/jpeg') |         assert marmot.photos[0].content_type == "image/jpeg" | ||||||
|         self.assertEqual(marmot.photos[0].foo, 'bar') |         assert marmot.photos[0].foo == "bar" | ||||||
|         self.assertEqual(marmot.photos[0].get().length, 8313) |         assert marmot.photos[0].get().length == 8313 | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| if __name__ == '__main__': | if __name__ == "__main__": | ||||||
|     unittest.main() |     unittest.main() | ||||||
| @@ -1,5 +1,5 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| import six | import pytest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
| @@ -7,7 +7,6 @@ from tests.utils import MongoDBTestCase | |||||||
|  |  | ||||||
|  |  | ||||||
| class TestFloatField(MongoDBTestCase): | class TestFloatField(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def test_float_ne_operator(self): |     def test_float_ne_operator(self): | ||||||
|         class TestDocument(Document): |         class TestDocument(Document): | ||||||
|             float_fld = FloatField() |             float_fld = FloatField() | ||||||
| @@ -17,12 +16,13 @@ class TestFloatField(MongoDBTestCase): | |||||||
|         TestDocument(float_fld=None).save() |         TestDocument(float_fld=None).save() | ||||||
|         TestDocument(float_fld=1).save() |         TestDocument(float_fld=1).save() | ||||||
|  |  | ||||||
|         self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count()) |         assert 1 == TestDocument.objects(float_fld__ne=None).count() | ||||||
|         self.assertEqual(1, TestDocument.objects(float_fld__ne=1).count()) |         assert 1 == TestDocument.objects(float_fld__ne=1).count() | ||||||
|  |  | ||||||
|     def test_validation(self): |     def test_validation(self): | ||||||
|         """Ensure that invalid values cannot be assigned to float fields. |         """Ensure that invalid values cannot be assigned to float fields. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             height = FloatField(min_value=0.1, max_value=3.5) |             height = FloatField(min_value=0.1, max_value=3.5) | ||||||
|  |  | ||||||
| @@ -33,26 +33,30 @@ class TestFloatField(MongoDBTestCase): | |||||||
|         person.height = 1.89 |         person.height = 1.89 | ||||||
|         person.validate() |         person.validate() | ||||||
|  |  | ||||||
|         person.height = '2.0' |         person.height = "2.0" | ||||||
|         self.assertRaises(ValidationError, person.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             person.validate() | ||||||
|  |  | ||||||
|         person.height = 0.01 |         person.height = 0.01 | ||||||
|         self.assertRaises(ValidationError, person.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             person.validate() | ||||||
|  |  | ||||||
|         person.height = 4.0 |         person.height = 4.0 | ||||||
|         self.assertRaises(ValidationError, person.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             person.validate() | ||||||
|  |  | ||||||
|         person_2 = Person(height='something invalid') |         person_2 = Person(height="something invalid") | ||||||
|         self.assertRaises(ValidationError, person_2.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             person_2.validate() | ||||||
|  |  | ||||||
|         big_person = BigPerson() |         big_person = BigPerson() | ||||||
|  |  | ||||||
|         for value, value_type in enumerate(six.integer_types): |         big_person.height = int(0) | ||||||
|             big_person.height = value_type(value) |         big_person.validate() | ||||||
|             big_person.validate() |  | ||||||
|  |  | ||||||
|         big_person.height = 2 ** 500 |         big_person.height = 2 ** 500 | ||||||
|         big_person.validate() |         big_person.validate() | ||||||
|  |  | ||||||
|         big_person.height = 2 ** 100000  # Too big for a float value |         big_person.height = 2 ** 100000  # Too big for a float value | ||||||
|         self.assertRaises(ValidationError, big_person.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             big_person.validate() | ||||||
|   | |||||||
| @@ -2,30 +2,23 @@ | |||||||
| import unittest | import unittest | ||||||
| 
 | 
 | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.connection import get_db | from tests.utils import MongoDBTestCase | ||||||
| 
 |  | ||||||
| __all__ = ("GeoFieldTest", ) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class GeoFieldTest(unittest.TestCase): | class TestGeoField(MongoDBTestCase): | ||||||
| 
 |  | ||||||
|     def setUp(self): |  | ||||||
|         connect(db='mongoenginetest') |  | ||||||
|         self.db = get_db() |  | ||||||
| 
 |  | ||||||
|     def _test_for_expected_error(self, Cls, loc, expected): |     def _test_for_expected_error(self, Cls, loc, expected): | ||||||
|         try: |         try: | ||||||
|             Cls(loc=loc).validate() |             Cls(loc=loc).validate() | ||||||
|             self.fail('Should not validate the location {0}'.format(loc)) |             self.fail("Should not validate the location {0}".format(loc)) | ||||||
|         except ValidationError as e: |         except ValidationError as e: | ||||||
|             self.assertEqual(expected, e.to_dict()['loc']) |             assert expected == e.to_dict()["loc"] | ||||||
| 
 | 
 | ||||||
|     def test_geopoint_validation(self): |     def test_geopoint_validation(self): | ||||||
|         class Location(Document): |         class Location(Document): | ||||||
|             loc = GeoPointField() |             loc = GeoPointField() | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [{"x": 1, "y": 2}, 5, "a"] |         invalid_coords = [{"x": 1, "y": 2}, 5, "a"] | ||||||
|         expected = 'GeoPointField can only accept tuples or lists of (x, y)' |         expected = "GeoPointField can only accept tuples or lists of (x, y)" | ||||||
| 
 | 
 | ||||||
|         for coord in invalid_coords: |         for coord in invalid_coords: | ||||||
|             self._test_for_expected_error(Location, coord, expected) |             self._test_for_expected_error(Location, coord, expected) | ||||||
| @@ -40,7 +33,7 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             expected = "Both values (%s) in point must be float or int" % repr(coord) |             expected = "Both values (%s) in point must be float or int" % repr(coord) | ||||||
|             self._test_for_expected_error(Location, coord, expected) |             self._test_for_expected_error(Location, coord, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [21, 4, 'a'] |         invalid_coords = [21, 4, "a"] | ||||||
|         for coord in invalid_coords: |         for coord in invalid_coords: | ||||||
|             expected = "GeoPointField can only accept tuples or lists of (x, y)" |             expected = "GeoPointField can only accept tuples or lists of (x, y)" | ||||||
|             self._test_for_expected_error(Location, coord, expected) |             self._test_for_expected_error(Location, coord, expected) | ||||||
| @@ -50,7 +43,9 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             loc = PointField() |             loc = PointField() | ||||||
| 
 | 
 | ||||||
|         invalid_coords = {"x": 1, "y": 2} |         invalid_coords = {"x": 1, "y": 2} | ||||||
|         expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)' |         expected = ( | ||||||
|  |             "PointField can only accept a valid GeoJson dictionary or lists of (x, y)" | ||||||
|  |         ) | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = {"type": "MadeUp", "coordinates": []} |         invalid_coords = {"type": "MadeUp", "coordinates": []} | ||||||
| @@ -77,19 +72,16 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             self._test_for_expected_error(Location, coord, expected) |             self._test_for_expected_error(Location, coord, expected) | ||||||
| 
 | 
 | ||||||
|         Location(loc=[1, 2]).validate() |         Location(loc=[1, 2]).validate() | ||||||
|         Location(loc={ |         Location( | ||||||
|             "type": "Point", |             loc={"type": "Point", "coordinates": [81.4471435546875, 23.61432859499169]} | ||||||
|             "coordinates": [ |         ).validate() | ||||||
|               81.4471435546875, |  | ||||||
|               23.61432859499169 |  | ||||||
|             ]}).validate() |  | ||||||
| 
 | 
 | ||||||
|     def test_linestring_validation(self): |     def test_linestring_validation(self): | ||||||
|         class Location(Document): |         class Location(Document): | ||||||
|             loc = LineStringField() |             loc = LineStringField() | ||||||
| 
 | 
 | ||||||
|         invalid_coords = {"x": 1, "y": 2} |         invalid_coords = {"x": 1, "y": 2} | ||||||
|         expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)' |         expected = "LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)" | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} |         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||||
| @@ -97,7 +89,9 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]} |         invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]} | ||||||
|         expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point" |         expected = ( | ||||||
|  |             "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||||
|  |         ) | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [5, "a"] |         invalid_coords = [5, "a"] | ||||||
| @@ -105,16 +99,25 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [[1]] |         invalid_coords = [[1]] | ||||||
|         expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0]) |         expected = ( | ||||||
|  |             "Invalid LineString:\nValue (%s) must be a two-dimensional point" | ||||||
|  |             % repr(invalid_coords[0]) | ||||||
|  |         ) | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [[1, 2, 3]] |         invalid_coords = [[1, 2, 3]] | ||||||
|         expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0]) |         expected = ( | ||||||
|  |             "Invalid LineString:\nValue (%s) must be a two-dimensional point" | ||||||
|  |             % repr(invalid_coords[0]) | ||||||
|  |         ) | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [[[{}, {}]], [("a", "b")]] |         invalid_coords = [[[{}, {}]], [("a", "b")]] | ||||||
|         for coord in invalid_coords: |         for coord in invalid_coords: | ||||||
|             expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0]) |             expected = ( | ||||||
|  |                 "Invalid LineString:\nBoth values (%s) in point must be float or int" | ||||||
|  |                 % repr(coord[0]) | ||||||
|  |             ) | ||||||
|             self._test_for_expected_error(Location, coord, expected) |             self._test_for_expected_error(Location, coord, expected) | ||||||
| 
 | 
 | ||||||
|         Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate() |         Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate() | ||||||
| @@ -124,7 +127,9 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             loc = PolygonField() |             loc = PolygonField() | ||||||
| 
 | 
 | ||||||
|         invalid_coords = {"x": 1, "y": 2} |         invalid_coords = {"x": 1, "y": 2} | ||||||
|         expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)' |         expected = ( | ||||||
|  |             "PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)" | ||||||
|  |         ) | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} |         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||||
| @@ -136,7 +141,9 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [[[5, "a"]]] |         invalid_coords = [[[5, "a"]]] | ||||||
|         expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int" |         expected = ( | ||||||
|  |             "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int" | ||||||
|  |         ) | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [[[]]] |         invalid_coords = [[[]]] | ||||||
| @@ -162,7 +169,7 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             loc = MultiPointField() |             loc = MultiPointField() | ||||||
| 
 | 
 | ||||||
|         invalid_coords = {"x": 1, "y": 2} |         invalid_coords = {"x": 1, "y": 2} | ||||||
|         expected = 'MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)' |         expected = "MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)" | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} |         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||||
| @@ -188,19 +195,19 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             self._test_for_expected_error(Location, coord, expected) |             self._test_for_expected_error(Location, coord, expected) | ||||||
| 
 | 
 | ||||||
|         Location(loc=[[1, 2]]).validate() |         Location(loc=[[1, 2]]).validate() | ||||||
|         Location(loc={ |         Location( | ||||||
|             "type": "MultiPoint", |             loc={ | ||||||
|             "coordinates": [ |                 "type": "MultiPoint", | ||||||
|                 [1, 2], |                 "coordinates": [[1, 2], [81.4471435546875, 23.61432859499169]], | ||||||
|                 [81.4471435546875, 23.61432859499169] |             } | ||||||
|             ]}).validate() |         ).validate() | ||||||
| 
 | 
 | ||||||
|     def test_multilinestring_validation(self): |     def test_multilinestring_validation(self): | ||||||
|         class Location(Document): |         class Location(Document): | ||||||
|             loc = MultiLineStringField() |             loc = MultiLineStringField() | ||||||
| 
 | 
 | ||||||
|         invalid_coords = {"x": 1, "y": 2} |         invalid_coords = {"x": 1, "y": 2} | ||||||
|         expected = 'MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)' |         expected = "MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)" | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} |         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||||
| @@ -216,16 +223,25 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [[[1]]] |         invalid_coords = [[[1]]] | ||||||
|         expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0]) |         expected = ( | ||||||
|  |             "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" | ||||||
|  |             % repr(invalid_coords[0][0]) | ||||||
|  |         ) | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [[[1, 2, 3]]] |         invalid_coords = [[[1, 2, 3]]] | ||||||
|         expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0]) |         expected = ( | ||||||
|  |             "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" | ||||||
|  |             % repr(invalid_coords[0][0]) | ||||||
|  |         ) | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [[[[{}, {}]]], [[("a", "b")]]] |         invalid_coords = [[[[{}, {}]]], [[("a", "b")]]] | ||||||
|         for coord in invalid_coords: |         for coord in invalid_coords: | ||||||
|             expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0]) |             expected = ( | ||||||
|  |                 "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" | ||||||
|  |                 % repr(coord[0][0]) | ||||||
|  |             ) | ||||||
|             self._test_for_expected_error(Location, coord, expected) |             self._test_for_expected_error(Location, coord, expected) | ||||||
| 
 | 
 | ||||||
|         Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate() |         Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate() | ||||||
| @@ -235,7 +251,7 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             loc = MultiPolygonField() |             loc = MultiPolygonField() | ||||||
| 
 | 
 | ||||||
|         invalid_coords = {"x": 1, "y": 2} |         invalid_coords = {"x": 1, "y": 2} | ||||||
|         expected = 'MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)' |         expected = "MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)" | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} |         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||||
| @@ -243,7 +259,9 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = {"type": "MultiPolygon", "coordinates": [[[[1, 2, 3]]]]} |         invalid_coords = {"type": "MultiPolygon", "coordinates": [[[[1, 2, 3]]]]} | ||||||
|         expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" |         expected = ( | ||||||
|  |             "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||||
|  |         ) | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [[[[5, "a"]]]] |         invalid_coords = [[[[5, "a"]]]] | ||||||
| @@ -255,7 +273,9 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [[[[1, 2, 3]]]] |         invalid_coords = [[[[1, 2, 3]]]] | ||||||
|         expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" |         expected = ( | ||||||
|  |             "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||||
|  |         ) | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [[[[{}, {}]]], [[("a", "b")]]] |         invalid_coords = [[[[{}, {}]]], [[("a", "b")]]] | ||||||
| @@ -263,7 +283,9 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         invalid_coords = [[[[1, 2], [3, 4]]]] |         invalid_coords = [[[[1, 2], [3, 4]]]] | ||||||
|         expected = "Invalid MultiPolygon:\nLineStrings must start and end at the same point" |         expected = ( | ||||||
|  |             "Invalid MultiPolygon:\nLineStrings must start and end at the same point" | ||||||
|  |         ) | ||||||
|         self._test_for_expected_error(Location, invalid_coords, expected) |         self._test_for_expected_error(Location, invalid_coords, expected) | ||||||
| 
 | 
 | ||||||
|         Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate() |         Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate() | ||||||
| @@ -271,17 +293,19 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|     def test_indexes_geopoint(self): |     def test_indexes_geopoint(self): | ||||||
|         """Ensure that indexes are created automatically for GeoPointFields. |         """Ensure that indexes are created automatically for GeoPointFields. | ||||||
|         """ |         """ | ||||||
|  | 
 | ||||||
|         class Event(Document): |         class Event(Document): | ||||||
|             title = StringField() |             title = StringField() | ||||||
|             location = GeoPointField() |             location = GeoPointField() | ||||||
| 
 | 
 | ||||||
|         geo_indicies = Event._geo_indices() |         geo_indicies = Event._geo_indices() | ||||||
|         self.assertEqual(geo_indicies, [{'fields': [('location', '2d')]}]) |         assert geo_indicies == [{"fields": [("location", "2d")]}] | ||||||
| 
 | 
 | ||||||
|     def test_geopoint_embedded_indexes(self): |     def test_geopoint_embedded_indexes(self): | ||||||
|         """Ensure that indexes are created automatically for GeoPointFields on |         """Ensure that indexes are created automatically for GeoPointFields on | ||||||
|         embedded documents. |         embedded documents. | ||||||
|         """ |         """ | ||||||
|  | 
 | ||||||
|         class Venue(EmbeddedDocument): |         class Venue(EmbeddedDocument): | ||||||
|             location = GeoPointField() |             location = GeoPointField() | ||||||
|             name = StringField() |             name = StringField() | ||||||
| @@ -291,11 +315,12 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             venue = EmbeddedDocumentField(Venue) |             venue = EmbeddedDocumentField(Venue) | ||||||
| 
 | 
 | ||||||
|         geo_indicies = Event._geo_indices() |         geo_indicies = Event._geo_indices() | ||||||
|         self.assertEqual(geo_indicies, [{'fields': [('venue.location', '2d')]}]) |         assert geo_indicies == [{"fields": [("venue.location", "2d")]}] | ||||||
| 
 | 
 | ||||||
|     def test_indexes_2dsphere(self): |     def test_indexes_2dsphere(self): | ||||||
|         """Ensure that indexes are created automatically for GeoPointFields. |         """Ensure that indexes are created automatically for GeoPointFields. | ||||||
|         """ |         """ | ||||||
|  | 
 | ||||||
|         class Event(Document): |         class Event(Document): | ||||||
|             title = StringField() |             title = StringField() | ||||||
|             point = PointField() |             point = PointField() | ||||||
| @@ -303,13 +328,14 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             polygon = PolygonField() |             polygon = PolygonField() | ||||||
| 
 | 
 | ||||||
|         geo_indicies = Event._geo_indices() |         geo_indicies = Event._geo_indices() | ||||||
|         self.assertIn({'fields': [('line', '2dsphere')]}, geo_indicies) |         assert {"fields": [("line", "2dsphere")]} in geo_indicies | ||||||
|         self.assertIn({'fields': [('polygon', '2dsphere')]}, geo_indicies) |         assert {"fields": [("polygon", "2dsphere")]} in geo_indicies | ||||||
|         self.assertIn({'fields': [('point', '2dsphere')]}, geo_indicies) |         assert {"fields": [("point", "2dsphere")]} in geo_indicies | ||||||
| 
 | 
 | ||||||
|     def test_indexes_2dsphere_embedded(self): |     def test_indexes_2dsphere_embedded(self): | ||||||
|         """Ensure that indexes are created automatically for GeoPointFields. |         """Ensure that indexes are created automatically for GeoPointFields. | ||||||
|         """ |         """ | ||||||
|  | 
 | ||||||
|         class Venue(EmbeddedDocument): |         class Venue(EmbeddedDocument): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             point = PointField() |             point = PointField() | ||||||
| @@ -321,12 +347,11 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             venue = EmbeddedDocumentField(Venue) |             venue = EmbeddedDocumentField(Venue) | ||||||
| 
 | 
 | ||||||
|         geo_indicies = Event._geo_indices() |         geo_indicies = Event._geo_indices() | ||||||
|         self.assertIn({'fields': [('venue.line', '2dsphere')]}, geo_indicies) |         assert {"fields": [("venue.line", "2dsphere")]} in geo_indicies | ||||||
|         self.assertIn({'fields': [('venue.polygon', '2dsphere')]}, geo_indicies) |         assert {"fields": [("venue.polygon", "2dsphere")]} in geo_indicies | ||||||
|         self.assertIn({'fields': [('venue.point', '2dsphere')]}, geo_indicies) |         assert {"fields": [("venue.point", "2dsphere")]} in geo_indicies | ||||||
| 
 | 
 | ||||||
|     def test_geo_indexes_recursion(self): |     def test_geo_indexes_recursion(self): | ||||||
| 
 |  | ||||||
|         class Location(Document): |         class Location(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             location = GeoPointField() |             location = GeoPointField() | ||||||
| @@ -338,14 +363,14 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|         Location.drop_collection() |         Location.drop_collection() | ||||||
|         Parent.drop_collection() |         Parent.drop_collection() | ||||||
| 
 | 
 | ||||||
|         Parent(name='Berlin').save() |         Parent(name="Berlin").save() | ||||||
|         info = Parent._get_collection().index_information() |         info = Parent._get_collection().index_information() | ||||||
|         self.assertNotIn('location_2d', info) |         assert "location_2d" not in info | ||||||
|         info = Location._get_collection().index_information() |         info = Location._get_collection().index_information() | ||||||
|         self.assertIn('location_2d', info) |         assert "location_2d" in info | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(len(Parent._geo_indices()), 0) |         assert len(Parent._geo_indices()) == 0 | ||||||
|         self.assertEqual(len(Location._geo_indices()), 1) |         assert len(Location._geo_indices()) == 1 | ||||||
| 
 | 
 | ||||||
|     def test_geo_indexes_auto_index(self): |     def test_geo_indexes_auto_index(self): | ||||||
| 
 | 
 | ||||||
| @@ -354,18 +379,18 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             location = PointField(auto_index=False) |             location = PointField(auto_index=False) | ||||||
|             datetime = DateTimeField() |             datetime = DateTimeField() | ||||||
| 
 | 
 | ||||||
|             meta = { |             meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]} | ||||||
|                 'indexes': [[("location", "2dsphere"), ("datetime", 1)]] |  | ||||||
|             } |  | ||||||
| 
 | 
 | ||||||
|         self.assertEqual([], Log._geo_indices()) |         assert [] == Log._geo_indices() | ||||||
| 
 | 
 | ||||||
|         Log.drop_collection() |         Log.drop_collection() | ||||||
|         Log.ensure_indexes() |         Log.ensure_indexes() | ||||||
| 
 | 
 | ||||||
|         info = Log._get_collection().index_information() |         info = Log._get_collection().index_information() | ||||||
|         self.assertEqual(info["location_2dsphere_datetime_1"]["key"], |         assert info["location_2dsphere_datetime_1"]["key"] == [ | ||||||
|                          [('location', '2dsphere'), ('datetime', 1)]) |             ("location", "2dsphere"), | ||||||
|  |             ("datetime", 1), | ||||||
|  |         ] | ||||||
| 
 | 
 | ||||||
|         # Test listing explicitly |         # Test listing explicitly | ||||||
|         class Log(Document): |         class Log(Document): | ||||||
| @@ -373,20 +398,20 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             datetime = DateTimeField() |             datetime = DateTimeField() | ||||||
| 
 | 
 | ||||||
|             meta = { |             meta = { | ||||||
|                 'indexes': [ |                 "indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}] | ||||||
|                     {'fields': [("location", "2dsphere"), ("datetime", 1)]} |  | ||||||
|                 ] |  | ||||||
|             } |             } | ||||||
| 
 | 
 | ||||||
|         self.assertEqual([], Log._geo_indices()) |         assert [] == Log._geo_indices() | ||||||
| 
 | 
 | ||||||
|         Log.drop_collection() |         Log.drop_collection() | ||||||
|         Log.ensure_indexes() |         Log.ensure_indexes() | ||||||
| 
 | 
 | ||||||
|         info = Log._get_collection().index_information() |         info = Log._get_collection().index_information() | ||||||
|         self.assertEqual(info["location_2dsphere_datetime_1"]["key"], |         assert info["location_2dsphere_datetime_1"]["key"] == [ | ||||||
|                          [('location', '2dsphere'), ('datetime', 1)]) |             ("location", "2dsphere"), | ||||||
|  |             ("datetime", 1), | ||||||
|  |         ] | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| if __name__ == '__main__': | if __name__ == "__main__": | ||||||
|     unittest.main() |     unittest.main() | ||||||
| @@ -1,14 +1,16 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
|  | import pytest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestIntField(MongoDBTestCase): | class TestIntField(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def test_int_validation(self): |     def test_int_validation(self): | ||||||
|         """Ensure that invalid values cannot be assigned to int fields. |         """Ensure that invalid values cannot be assigned to int fields. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             age = IntField(min_value=0, max_value=110) |             age = IntField(min_value=0, max_value=110) | ||||||
|  |  | ||||||
| @@ -23,11 +25,14 @@ class TestIntField(MongoDBTestCase): | |||||||
|         person.validate() |         person.validate() | ||||||
|  |  | ||||||
|         person.age = -1 |         person.age = -1 | ||||||
|         self.assertRaises(ValidationError, person.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             person.validate() | ||||||
|         person.age = 120 |         person.age = 120 | ||||||
|         self.assertRaises(ValidationError, person.validate) |         with pytest.raises(ValidationError): | ||||||
|         person.age = 'ten' |             person.validate() | ||||||
|         self.assertRaises(ValidationError, person.validate) |         person.age = "ten" | ||||||
|  |         with pytest.raises(ValidationError): | ||||||
|  |             person.validate() | ||||||
|  |  | ||||||
|     def test_ne_operator(self): |     def test_ne_operator(self): | ||||||
|         class TestDocument(Document): |         class TestDocument(Document): | ||||||
| @@ -38,5 +43,5 @@ class TestIntField(MongoDBTestCase): | |||||||
|         TestDocument(int_fld=None).save() |         TestDocument(int_fld=None).save() | ||||||
|         TestDocument(int_fld=1).save() |         TestDocument(int_fld=1).save() | ||||||
|  |  | ||||||
|         self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count()) |         assert 1 == TestDocument.objects(int_fld__ne=None).count() | ||||||
|         self.assertEqual(1, TestDocument.objects(int_fld__ne=1).count()) |         assert 1 == TestDocument.objects(int_fld__ne=1).count() | ||||||
|   | |||||||
| @@ -1,5 +1,6 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| from bson import DBRef, ObjectId | from bson import DBRef, ObjectId | ||||||
|  | import pytest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.base import LazyReference | from mongoengine.base import LazyReference | ||||||
| @@ -11,7 +12,8 @@ class TestLazyReferenceField(MongoDBTestCase): | |||||||
|     def test_lazy_reference_config(self): |     def test_lazy_reference_config(self): | ||||||
|         # Make sure ReferenceField only accepts a document class or a string |         # Make sure ReferenceField only accepts a document class or a string | ||||||
|         # with a document class name. |         # with a document class name. | ||||||
|         self.assertRaises(ValidationError, LazyReferenceField, EmbeddedDocument) |         with pytest.raises(ValidationError): | ||||||
|  |             LazyReferenceField(EmbeddedDocument) | ||||||
|  |  | ||||||
|     def test___repr__(self): |     def test___repr__(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
| @@ -25,7 +27,7 @@ class TestLazyReferenceField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         animal = Animal() |         animal = Animal() | ||||||
|         oc = Ocurrence(animal=animal) |         oc = Ocurrence(animal=animal) | ||||||
|         self.assertIn('LazyReference', repr(oc.animal)) |         assert "LazyReference" in repr(oc.animal) | ||||||
|  |  | ||||||
|     def test___getattr___unknown_attr_raises_attribute_error(self): |     def test___getattr___unknown_attr_raises_attribute_error(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
| @@ -39,7 +41,7 @@ class TestLazyReferenceField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         animal = Animal().save() |         animal = Animal().save() | ||||||
|         oc = Ocurrence(animal=animal) |         oc = Ocurrence(animal=animal) | ||||||
|         with self.assertRaises(AttributeError): |         with pytest.raises(AttributeError): | ||||||
|             oc.animal.not_exist |             oc.animal.not_exist | ||||||
|  |  | ||||||
|     def test_lazy_reference_simple(self): |     def test_lazy_reference_simple(self): | ||||||
| @@ -57,19 +59,19 @@ class TestLazyReferenceField(MongoDBTestCase): | |||||||
|         animal = Animal(name="Leopard", tag="heavy").save() |         animal = Animal(name="Leopard", tag="heavy").save() | ||||||
|         Ocurrence(person="test", animal=animal).save() |         Ocurrence(person="test", animal=animal).save() | ||||||
|         p = Ocurrence.objects.get() |         p = Ocurrence.objects.get() | ||||||
|         self.assertIsInstance(p.animal, LazyReference) |         assert isinstance(p.animal, LazyReference) | ||||||
|         fetched_animal = p.animal.fetch() |         fetched_animal = p.animal.fetch() | ||||||
|         self.assertEqual(fetched_animal, animal) |         assert fetched_animal == animal | ||||||
|         # `fetch` keep cache on referenced document by default... |         # `fetch` keep cache on referenced document by default... | ||||||
|         animal.tag = "not so heavy" |         animal.tag = "not so heavy" | ||||||
|         animal.save() |         animal.save() | ||||||
|         double_fetch = p.animal.fetch() |         double_fetch = p.animal.fetch() | ||||||
|         self.assertIs(fetched_animal, double_fetch) |         assert fetched_animal is double_fetch | ||||||
|         self.assertEqual(double_fetch.tag, "heavy") |         assert double_fetch.tag == "heavy" | ||||||
|         # ...unless specified otherwise |         # ...unless specified otherwise | ||||||
|         fetch_force = p.animal.fetch(force=True) |         fetch_force = p.animal.fetch(force=True) | ||||||
|         self.assertIsNot(fetch_force, fetched_animal) |         assert fetch_force is not fetched_animal | ||||||
|         self.assertEqual(fetch_force.tag, "not so heavy") |         assert fetch_force.tag == "not so heavy" | ||||||
|  |  | ||||||
|     def test_lazy_reference_fetch_invalid_ref(self): |     def test_lazy_reference_fetch_invalid_ref(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
| @@ -87,13 +89,13 @@ class TestLazyReferenceField(MongoDBTestCase): | |||||||
|         Ocurrence(person="test", animal=animal).save() |         Ocurrence(person="test", animal=animal).save() | ||||||
|         animal.delete() |         animal.delete() | ||||||
|         p = Ocurrence.objects.get() |         p = Ocurrence.objects.get() | ||||||
|         self.assertIsInstance(p.animal, LazyReference) |         assert isinstance(p.animal, LazyReference) | ||||||
|         with self.assertRaises(DoesNotExist): |         with pytest.raises(DoesNotExist): | ||||||
|             p.animal.fetch() |             p.animal.fetch() | ||||||
|  |  | ||||||
|     def test_lazy_reference_set(self): |     def test_lazy_reference_set(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
|             meta = {'allow_inheritance': True} |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             tag = StringField() |             tag = StringField() | ||||||
| @@ -109,21 +111,20 @@ class TestLazyReferenceField(MongoDBTestCase): | |||||||
|             nick = StringField() |             nick = StringField() | ||||||
|  |  | ||||||
|         animal = Animal(name="Leopard", tag="heavy").save() |         animal = Animal(name="Leopard", tag="heavy").save() | ||||||
|         sub_animal = SubAnimal(nick='doggo', name='dog').save() |         sub_animal = SubAnimal(nick="doggo", name="dog").save() | ||||||
|         for ref in ( |         for ref in ( | ||||||
|                 animal, |             animal, | ||||||
|                 animal.pk, |             animal.pk, | ||||||
|                 DBRef(animal._get_collection_name(), animal.pk), |             DBRef(animal._get_collection_name(), animal.pk), | ||||||
|                 LazyReference(Animal, animal.pk), |             LazyReference(Animal, animal.pk), | ||||||
|  |             sub_animal, | ||||||
|                 sub_animal, |             sub_animal.pk, | ||||||
|                 sub_animal.pk, |             DBRef(sub_animal._get_collection_name(), sub_animal.pk), | ||||||
|                 DBRef(sub_animal._get_collection_name(), sub_animal.pk), |             LazyReference(SubAnimal, sub_animal.pk), | ||||||
|                 LazyReference(SubAnimal, sub_animal.pk), |         ): | ||||||
|                 ): |  | ||||||
|             p = Ocurrence(person="test", animal=ref).save() |             p = Ocurrence(person="test", animal=ref).save() | ||||||
|             p.reload() |             p.reload() | ||||||
|             self.assertIsInstance(p.animal, LazyReference) |             assert isinstance(p.animal, LazyReference) | ||||||
|             p.animal.fetch() |             p.animal.fetch() | ||||||
|  |  | ||||||
|     def test_lazy_reference_bad_set(self): |     def test_lazy_reference_bad_set(self): | ||||||
| @@ -144,19 +145,20 @@ class TestLazyReferenceField(MongoDBTestCase): | |||||||
|         animal = Animal(name="Leopard", tag="heavy").save() |         animal = Animal(name="Leopard", tag="heavy").save() | ||||||
|         baddoc = BadDoc().save() |         baddoc = BadDoc().save() | ||||||
|         for bad in ( |         for bad in ( | ||||||
|                 42, |             42, | ||||||
|                 'foo', |             "foo", | ||||||
|                 baddoc, |             baddoc, | ||||||
|                 DBRef(baddoc._get_collection_name(), animal.pk), |             DBRef(baddoc._get_collection_name(), animal.pk), | ||||||
|                 LazyReference(BadDoc, animal.pk) |             LazyReference(BadDoc, animal.pk), | ||||||
|                 ): |         ): | ||||||
|             with self.assertRaises(ValidationError): |             with pytest.raises(ValidationError): | ||||||
|                 p = Ocurrence(person="test", animal=bad).save() |                 Ocurrence(person="test", animal=bad).save() | ||||||
|  |  | ||||||
|     def test_lazy_reference_query_conversion(self): |     def test_lazy_reference_query_conversion(self): | ||||||
|         """Ensure that LazyReferenceFields can be queried using objects and values |         """Ensure that LazyReferenceFields can be queried using objects and values | ||||||
|         of the type of the primary key of the referenced object. |         of the type of the primary key of the referenced object. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Member(Document): |         class Member(Document): | ||||||
|             user_num = IntField(primary_key=True) |             user_num = IntField(primary_key=True) | ||||||
|  |  | ||||||
| @@ -172,26 +174,27 @@ class TestLazyReferenceField(MongoDBTestCase): | |||||||
|         m2 = Member(user_num=2) |         m2 = Member(user_num=2) | ||||||
|         m2.save() |         m2.save() | ||||||
|  |  | ||||||
|         post1 = BlogPost(title='post 1', author=m1) |         post1 = BlogPost(title="post 1", author=m1) | ||||||
|         post1.save() |         post1.save() | ||||||
|  |  | ||||||
|         post2 = BlogPost(title='post 2', author=m2) |         post2 = BlogPost(title="post 2", author=m2) | ||||||
|         post2.save() |         post2.save() | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m1).first() |         post = BlogPost.objects(author=m1).first() | ||||||
|         self.assertEqual(post.id, post1.id) |         assert post.id == post1.id | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m2).first() |         post = BlogPost.objects(author=m2).first() | ||||||
|         self.assertEqual(post.id, post2.id) |         assert post.id == post2.id | ||||||
|  |  | ||||||
|         # Same thing by passing a LazyReference instance |         # Same thing by passing a LazyReference instance | ||||||
|         post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() |         post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() | ||||||
|         self.assertEqual(post.id, post2.id) |         assert post.id == post2.id | ||||||
|  |  | ||||||
|     def test_lazy_reference_query_conversion_dbref(self): |     def test_lazy_reference_query_conversion_dbref(self): | ||||||
|         """Ensure that LazyReferenceFields can be queried using objects and values |         """Ensure that LazyReferenceFields can be queried using objects and values | ||||||
|         of the type of the primary key of the referenced object. |         of the type of the primary key of the referenced object. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Member(Document): |         class Member(Document): | ||||||
|             user_num = IntField(primary_key=True) |             user_num = IntField(primary_key=True) | ||||||
|  |  | ||||||
| @@ -207,21 +210,21 @@ class TestLazyReferenceField(MongoDBTestCase): | |||||||
|         m2 = Member(user_num=2) |         m2 = Member(user_num=2) | ||||||
|         m2.save() |         m2.save() | ||||||
|  |  | ||||||
|         post1 = BlogPost(title='post 1', author=m1) |         post1 = BlogPost(title="post 1", author=m1) | ||||||
|         post1.save() |         post1.save() | ||||||
|  |  | ||||||
|         post2 = BlogPost(title='post 2', author=m2) |         post2 = BlogPost(title="post 2", author=m2) | ||||||
|         post2.save() |         post2.save() | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m1).first() |         post = BlogPost.objects(author=m1).first() | ||||||
|         self.assertEqual(post.id, post1.id) |         assert post.id == post1.id | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m2).first() |         post = BlogPost.objects(author=m2).first() | ||||||
|         self.assertEqual(post.id, post2.id) |         assert post.id == post2.id | ||||||
|  |  | ||||||
|         # Same thing by passing a LazyReference instance |         # Same thing by passing a LazyReference instance | ||||||
|         post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() |         post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() | ||||||
|         self.assertEqual(post.id, post2.id) |         assert post.id == post2.id | ||||||
|  |  | ||||||
|     def test_lazy_reference_passthrough(self): |     def test_lazy_reference_passthrough(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
| @@ -238,21 +241,21 @@ class TestLazyReferenceField(MongoDBTestCase): | |||||||
|         animal = Animal(name="Leopard", tag="heavy").save() |         animal = Animal(name="Leopard", tag="heavy").save() | ||||||
|         Ocurrence(animal=animal, animal_passthrough=animal).save() |         Ocurrence(animal=animal, animal_passthrough=animal).save() | ||||||
|         p = Ocurrence.objects.get() |         p = Ocurrence.objects.get() | ||||||
|         self.assertIsInstance(p.animal, LazyReference) |         assert isinstance(p.animal, LazyReference) | ||||||
|         with self.assertRaises(KeyError): |         with pytest.raises(KeyError): | ||||||
|             p.animal['name'] |             p.animal["name"] | ||||||
|         with self.assertRaises(AttributeError): |         with pytest.raises(AttributeError): | ||||||
|             p.animal.name |             p.animal.name | ||||||
|         self.assertEqual(p.animal.pk, animal.pk) |         assert p.animal.pk == animal.pk | ||||||
|  |  | ||||||
|         self.assertEqual(p.animal_passthrough.name, "Leopard") |         assert p.animal_passthrough.name == "Leopard" | ||||||
|         self.assertEqual(p.animal_passthrough['name'], "Leopard") |         assert p.animal_passthrough["name"] == "Leopard" | ||||||
|  |  | ||||||
|         # Should not be able to access referenced document's methods |         # Should not be able to access referenced document's methods | ||||||
|         with self.assertRaises(AttributeError): |         with pytest.raises(AttributeError): | ||||||
|             p.animal.save |             p.animal.save | ||||||
|         with self.assertRaises(KeyError): |         with pytest.raises(KeyError): | ||||||
|             p.animal['save'] |             p.animal["save"] | ||||||
|  |  | ||||||
|     def test_lazy_reference_not_set(self): |     def test_lazy_reference_not_set(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
| @@ -266,9 +269,9 @@ class TestLazyReferenceField(MongoDBTestCase): | |||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         Ocurrence.drop_collection() |         Ocurrence.drop_collection() | ||||||
|  |  | ||||||
|         Ocurrence(person='foo').save() |         Ocurrence(person="foo").save() | ||||||
|         p = Ocurrence.objects.get() |         p = Ocurrence.objects.get() | ||||||
|         self.assertIs(p.animal, None) |         assert p.animal is None | ||||||
|  |  | ||||||
|     def test_lazy_reference_equality(self): |     def test_lazy_reference_equality(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
| @@ -279,12 +282,12 @@ class TestLazyReferenceField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         animal = Animal(name="Leopard", tag="heavy").save() |         animal = Animal(name="Leopard", tag="heavy").save() | ||||||
|         animalref = LazyReference(Animal, animal.pk) |         animalref = LazyReference(Animal, animal.pk) | ||||||
|         self.assertEqual(animal, animalref) |         assert animal == animalref | ||||||
|         self.assertEqual(animalref, animal) |         assert animalref == animal | ||||||
|  |  | ||||||
|         other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90")) |         other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90")) | ||||||
|         self.assertNotEqual(animal, other_animalref) |         assert animal != other_animalref | ||||||
|         self.assertNotEqual(other_animalref, animal) |         assert other_animalref != animal | ||||||
|  |  | ||||||
|     def test_lazy_reference_embedded(self): |     def test_lazy_reference_embedded(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
| @@ -303,21 +306,21 @@ class TestLazyReferenceField(MongoDBTestCase): | |||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         Ocurrence.drop_collection() |         Ocurrence.drop_collection() | ||||||
|  |  | ||||||
|         animal1 = Animal('doggo').save() |         animal1 = Animal(name="doggo").save() | ||||||
|         animal2 = Animal('cheeta').save() |         animal2 = Animal(name="cheeta").save() | ||||||
|  |  | ||||||
|         def check_fields_type(occ): |         def check_fields_type(occ): | ||||||
|             self.assertIsInstance(occ.direct, LazyReference) |             assert isinstance(occ.direct, LazyReference) | ||||||
|             for elem in occ.in_list: |             for elem in occ.in_list: | ||||||
|                 self.assertIsInstance(elem, LazyReference) |                 assert isinstance(elem, LazyReference) | ||||||
|             self.assertIsInstance(occ.in_embedded.direct, LazyReference) |             assert isinstance(occ.in_embedded.direct, LazyReference) | ||||||
|             for elem in occ.in_embedded.in_list: |             for elem in occ.in_embedded.in_list: | ||||||
|                 self.assertIsInstance(elem, LazyReference) |                 assert isinstance(elem, LazyReference) | ||||||
|  |  | ||||||
|         occ = Ocurrence( |         occ = Ocurrence( | ||||||
|             in_list=[animal1, animal2], |             in_list=[animal1, animal2], | ||||||
|             in_embedded={'in_list': [animal1, animal2], 'direct': animal1}, |             in_embedded={"in_list": [animal1, animal2], "direct": animal1}, | ||||||
|             direct=animal1 |             direct=animal1, | ||||||
|         ).save() |         ).save() | ||||||
|         check_fields_type(occ) |         check_fields_type(occ) | ||||||
|         occ.reload() |         occ.reload() | ||||||
| @@ -345,19 +348,19 @@ class TestGenericLazyReferenceField(MongoDBTestCase): | |||||||
|         animal = Animal(name="Leopard", tag="heavy").save() |         animal = Animal(name="Leopard", tag="heavy").save() | ||||||
|         Ocurrence(person="test", animal=animal).save() |         Ocurrence(person="test", animal=animal).save() | ||||||
|         p = Ocurrence.objects.get() |         p = Ocurrence.objects.get() | ||||||
|         self.assertIsInstance(p.animal, LazyReference) |         assert isinstance(p.animal, LazyReference) | ||||||
|         fetched_animal = p.animal.fetch() |         fetched_animal = p.animal.fetch() | ||||||
|         self.assertEqual(fetched_animal, animal) |         assert fetched_animal == animal | ||||||
|         # `fetch` keep cache on referenced document by default... |         # `fetch` keep cache on referenced document by default... | ||||||
|         animal.tag = "not so heavy" |         animal.tag = "not so heavy" | ||||||
|         animal.save() |         animal.save() | ||||||
|         double_fetch = p.animal.fetch() |         double_fetch = p.animal.fetch() | ||||||
|         self.assertIs(fetched_animal, double_fetch) |         assert fetched_animal is double_fetch | ||||||
|         self.assertEqual(double_fetch.tag, "heavy") |         assert double_fetch.tag == "heavy" | ||||||
|         # ...unless specified otherwise |         # ...unless specified otherwise | ||||||
|         fetch_force = p.animal.fetch(force=True) |         fetch_force = p.animal.fetch(force=True) | ||||||
|         self.assertIsNot(fetch_force, fetched_animal) |         assert fetch_force is not fetched_animal | ||||||
|         self.assertEqual(fetch_force.tag, "not so heavy") |         assert fetch_force.tag == "not so heavy" | ||||||
|  |  | ||||||
|     def test_generic_lazy_reference_choices(self): |     def test_generic_lazy_reference_choices(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
| @@ -383,14 +386,14 @@ class TestGenericLazyReferenceField(MongoDBTestCase): | |||||||
|         mineral = Mineral(name="Granite").save() |         mineral = Mineral(name="Granite").save() | ||||||
|  |  | ||||||
|         occ_animal = Ocurrence(living_thing=animal, thing=animal).save() |         occ_animal = Ocurrence(living_thing=animal, thing=animal).save() | ||||||
|         occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save() |         _ = Ocurrence(living_thing=vegetal, thing=vegetal).save() | ||||||
|         with self.assertRaises(ValidationError): |         with pytest.raises(ValidationError): | ||||||
|             Ocurrence(living_thing=mineral).save() |             Ocurrence(living_thing=mineral).save() | ||||||
|  |  | ||||||
|         occ = Ocurrence.objects.get(living_thing=animal) |         occ = Ocurrence.objects.get(living_thing=animal) | ||||||
|         self.assertEqual(occ, occ_animal) |         assert occ == occ_animal | ||||||
|         self.assertIsInstance(occ.thing, LazyReference) |         assert isinstance(occ.thing, LazyReference) | ||||||
|         self.assertIsInstance(occ.living_thing, LazyReference) |         assert isinstance(occ.living_thing, LazyReference) | ||||||
|  |  | ||||||
|         occ.thing = vegetal |         occ.thing = vegetal | ||||||
|         occ.living_thing = vegetal |         occ.living_thing = vegetal | ||||||
| @@ -398,12 +401,12 @@ class TestGenericLazyReferenceField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         occ.thing = mineral |         occ.thing = mineral | ||||||
|         occ.living_thing = mineral |         occ.living_thing = mineral | ||||||
|         with self.assertRaises(ValidationError): |         with pytest.raises(ValidationError): | ||||||
|             occ.save() |             occ.save() | ||||||
|  |  | ||||||
|     def test_generic_lazy_reference_set(self): |     def test_generic_lazy_reference_set(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
|             meta = {'allow_inheritance': True} |             meta = {"allow_inheritance": True} | ||||||
|  |  | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             tag = StringField() |             tag = StringField() | ||||||
| @@ -419,19 +422,21 @@ class TestGenericLazyReferenceField(MongoDBTestCase): | |||||||
|             nick = StringField() |             nick = StringField() | ||||||
|  |  | ||||||
|         animal = Animal(name="Leopard", tag="heavy").save() |         animal = Animal(name="Leopard", tag="heavy").save() | ||||||
|         sub_animal = SubAnimal(nick='doggo', name='dog').save() |         sub_animal = SubAnimal(nick="doggo", name="dog").save() | ||||||
|         for ref in ( |         for ref in ( | ||||||
|                 animal, |             animal, | ||||||
|                 LazyReference(Animal, animal.pk), |             LazyReference(Animal, animal.pk), | ||||||
|                 {'_cls': 'Animal', '_ref': DBRef(animal._get_collection_name(), animal.pk)}, |             {"_cls": "Animal", "_ref": DBRef(animal._get_collection_name(), animal.pk)}, | ||||||
|  |             sub_animal, | ||||||
|                 sub_animal, |             LazyReference(SubAnimal, sub_animal.pk), | ||||||
|                 LazyReference(SubAnimal, sub_animal.pk), |             { | ||||||
|                 {'_cls': 'SubAnimal', '_ref': DBRef(sub_animal._get_collection_name(), sub_animal.pk)}, |                 "_cls": "SubAnimal", | ||||||
|                 ): |                 "_ref": DBRef(sub_animal._get_collection_name(), sub_animal.pk), | ||||||
|  |             }, | ||||||
|  |         ): | ||||||
|             p = Ocurrence(person="test", animal=ref).save() |             p = Ocurrence(person="test", animal=ref).save() | ||||||
|             p.reload() |             p.reload() | ||||||
|             self.assertIsInstance(p.animal, (LazyReference, Document)) |             assert isinstance(p.animal, (LazyReference, Document)) | ||||||
|             p.animal.fetch() |             p.animal.fetch() | ||||||
|  |  | ||||||
|     def test_generic_lazy_reference_bad_set(self): |     def test_generic_lazy_reference_bad_set(self): | ||||||
| @@ -441,7 +446,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         class Ocurrence(Document): |         class Ocurrence(Document): | ||||||
|             person = StringField() |             person = StringField() | ||||||
|             animal = GenericLazyReferenceField(choices=['Animal']) |             animal = GenericLazyReferenceField(choices=["Animal"]) | ||||||
|  |  | ||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         Ocurrence.drop_collection() |         Ocurrence.drop_collection() | ||||||
| @@ -451,14 +456,9 @@ class TestGenericLazyReferenceField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         animal = Animal(name="Leopard", tag="heavy").save() |         animal = Animal(name="Leopard", tag="heavy").save() | ||||||
|         baddoc = BadDoc().save() |         baddoc = BadDoc().save() | ||||||
|         for bad in ( |         for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)): | ||||||
|                 42, |             with pytest.raises(ValidationError): | ||||||
|                 'foo', |                 Ocurrence(person="test", animal=bad).save() | ||||||
|                 baddoc, |  | ||||||
|                 LazyReference(BadDoc, animal.pk) |  | ||||||
|                 ): |  | ||||||
|             with self.assertRaises(ValidationError): |  | ||||||
|                 p = Ocurrence(person="test", animal=bad).save() |  | ||||||
|  |  | ||||||
|     def test_generic_lazy_reference_query_conversion(self): |     def test_generic_lazy_reference_query_conversion(self): | ||||||
|         class Member(Document): |         class Member(Document): | ||||||
| @@ -476,21 +476,21 @@ class TestGenericLazyReferenceField(MongoDBTestCase): | |||||||
|         m2 = Member(user_num=2) |         m2 = Member(user_num=2) | ||||||
|         m2.save() |         m2.save() | ||||||
|  |  | ||||||
|         post1 = BlogPost(title='post 1', author=m1) |         post1 = BlogPost(title="post 1", author=m1) | ||||||
|         post1.save() |         post1.save() | ||||||
|  |  | ||||||
|         post2 = BlogPost(title='post 2', author=m2) |         post2 = BlogPost(title="post 2", author=m2) | ||||||
|         post2.save() |         post2.save() | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m1).first() |         post = BlogPost.objects(author=m1).first() | ||||||
|         self.assertEqual(post.id, post1.id) |         assert post.id == post1.id | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m2).first() |         post = BlogPost.objects(author=m2).first() | ||||||
|         self.assertEqual(post.id, post2.id) |         assert post.id == post2.id | ||||||
|  |  | ||||||
|         # Same thing by passing a LazyReference instance |         # Same thing by passing a LazyReference instance | ||||||
|         post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() |         post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() | ||||||
|         self.assertEqual(post.id, post2.id) |         assert post.id == post2.id | ||||||
|  |  | ||||||
|     def test_generic_lazy_reference_not_set(self): |     def test_generic_lazy_reference_not_set(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
| @@ -504,9 +504,9 @@ class TestGenericLazyReferenceField(MongoDBTestCase): | |||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         Ocurrence.drop_collection() |         Ocurrence.drop_collection() | ||||||
|  |  | ||||||
|         Ocurrence(person='foo').save() |         Ocurrence(person="foo").save() | ||||||
|         p = Ocurrence.objects.get() |         p = Ocurrence.objects.get() | ||||||
|         self.assertIs(p.animal, None) |         assert p.animal is None | ||||||
|  |  | ||||||
|     def test_generic_lazy_reference_accepts_string_instead_of_class(self): |     def test_generic_lazy_reference_accepts_string_instead_of_class(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
| @@ -515,7 +515,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         class Ocurrence(Document): |         class Ocurrence(Document): | ||||||
|             person = StringField() |             person = StringField() | ||||||
|             animal = GenericLazyReferenceField('Animal') |             animal = GenericLazyReferenceField("Animal") | ||||||
|  |  | ||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         Ocurrence.drop_collection() |         Ocurrence.drop_collection() | ||||||
| @@ -523,7 +523,7 @@ class TestGenericLazyReferenceField(MongoDBTestCase): | |||||||
|         animal = Animal().save() |         animal = Animal().save() | ||||||
|         Ocurrence(animal=animal).save() |         Ocurrence(animal=animal).save() | ||||||
|         p = Ocurrence.objects.get() |         p = Ocurrence.objects.get() | ||||||
|         self.assertEqual(p.animal, animal) |         assert p.animal == animal | ||||||
|  |  | ||||||
|     def test_generic_lazy_reference_embedded(self): |     def test_generic_lazy_reference_embedded(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
| @@ -542,27 +542,33 @@ class TestGenericLazyReferenceField(MongoDBTestCase): | |||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         Ocurrence.drop_collection() |         Ocurrence.drop_collection() | ||||||
|  |  | ||||||
|         animal1 = Animal('doggo').save() |         animal1 = Animal(name="doggo").save() | ||||||
|         animal2 = Animal('cheeta').save() |         animal2 = Animal(name="cheeta").save() | ||||||
|  |  | ||||||
|         def check_fields_type(occ): |         def check_fields_type(occ): | ||||||
|             self.assertIsInstance(occ.direct, LazyReference) |             assert isinstance(occ.direct, LazyReference) | ||||||
|             for elem in occ.in_list: |             for elem in occ.in_list: | ||||||
|                 self.assertIsInstance(elem, LazyReference) |                 assert isinstance(elem, LazyReference) | ||||||
|             self.assertIsInstance(occ.in_embedded.direct, LazyReference) |             assert isinstance(occ.in_embedded.direct, LazyReference) | ||||||
|             for elem in occ.in_embedded.in_list: |             for elem in occ.in_embedded.in_list: | ||||||
|                 self.assertIsInstance(elem, LazyReference) |                 assert isinstance(elem, LazyReference) | ||||||
|  |  | ||||||
|         occ = Ocurrence( |         occ = Ocurrence( | ||||||
|             in_list=[animal1, animal2], |             in_list=[animal1, animal2], | ||||||
|             in_embedded={'in_list': [animal1, animal2], 'direct': animal1}, |             in_embedded={"in_list": [animal1, animal2], "direct": animal1}, | ||||||
|             direct=animal1 |             direct=animal1, | ||||||
|         ).save() |         ).save() | ||||||
|         check_fields_type(occ) |         check_fields_type(occ) | ||||||
|         occ.reload() |         occ.reload() | ||||||
|         check_fields_type(occ) |         check_fields_type(occ) | ||||||
|         animal1_ref = {'_cls': 'Animal', '_ref': DBRef(animal1._get_collection_name(), animal1.pk)} |         animal1_ref = { | ||||||
|         animal2_ref = {'_cls': 'Animal', '_ref': DBRef(animal2._get_collection_name(), animal2.pk)} |             "_cls": "Animal", | ||||||
|  |             "_ref": DBRef(animal1._get_collection_name(), animal1.pk), | ||||||
|  |         } | ||||||
|  |         animal2_ref = { | ||||||
|  |             "_cls": "Animal", | ||||||
|  |             "_ref": DBRef(animal2._get_collection_name(), animal2.pk), | ||||||
|  |         } | ||||||
|         occ.direct = animal1_ref |         occ.direct = animal1_ref | ||||||
|         occ.in_list = [animal1_ref, animal2_ref] |         occ.in_list = [animal1_ref, animal2_ref] | ||||||
|         occ.in_embedded.direct = animal1_ref |         occ.in_embedded.direct = animal1_ref | ||||||
|   | |||||||
| @@ -1,10 +1,5 @@ | |||||||
| # -*- coding: utf-8 -*- | from bson.int64 import Int64 | ||||||
| import six | import pytest | ||||||
|  |  | ||||||
| try: |  | ||||||
|     from bson.int64 import Int64 |  | ||||||
| except ImportError: |  | ||||||
|     Int64 = long |  | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.connection import get_db | from mongoengine.connection import get_db | ||||||
| @@ -13,23 +8,26 @@ from tests.utils import MongoDBTestCase | |||||||
|  |  | ||||||
|  |  | ||||||
| class TestLongField(MongoDBTestCase): | class TestLongField(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def test_long_field_is_considered_as_int64(self): |     def test_long_field_is_considered_as_int64(self): | ||||||
|         """ |         """ | ||||||
|         Tests that long fields are stored as long in mongo, even if long |         Tests that long fields are stored as long in mongo, even if long | ||||||
|         value is small enough to be an int. |         value is small enough to be an int. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class TestLongFieldConsideredAsInt64(Document): |         class TestLongFieldConsideredAsInt64(Document): | ||||||
|             some_long = LongField() |             some_long = LongField() | ||||||
|  |  | ||||||
|         doc = TestLongFieldConsideredAsInt64(some_long=42).save() |         doc = TestLongFieldConsideredAsInt64(some_long=42).save() | ||||||
|         db = get_db() |         db = get_db() | ||||||
|         self.assertIsInstance(db.test_long_field_considered_as_int64.find()[0]['some_long'], Int64) |         assert isinstance( | ||||||
|         self.assertIsInstance(doc.some_long, six.integer_types) |             db.test_long_field_considered_as_int64.find()[0]["some_long"], Int64 | ||||||
|  |         ) | ||||||
|  |         assert isinstance(doc.some_long, int) | ||||||
|  |  | ||||||
|     def test_long_validation(self): |     def test_long_validation(self): | ||||||
|         """Ensure that invalid values cannot be assigned to long fields. |         """Ensure that invalid values cannot be assigned to long fields. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class TestDocument(Document): |         class TestDocument(Document): | ||||||
|             value = LongField(min_value=0, max_value=110) |             value = LongField(min_value=0, max_value=110) | ||||||
|  |  | ||||||
| @@ -38,11 +36,14 @@ class TestLongField(MongoDBTestCase): | |||||||
|         doc.validate() |         doc.validate() | ||||||
|  |  | ||||||
|         doc.value = -1 |         doc.value = -1 | ||||||
|         self.assertRaises(ValidationError, doc.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             doc.validate() | ||||||
|         doc.value = 120 |         doc.value = 120 | ||||||
|         self.assertRaises(ValidationError, doc.validate) |         with pytest.raises(ValidationError): | ||||||
|         doc.value = 'ten' |             doc.validate() | ||||||
|         self.assertRaises(ValidationError, doc.validate) |         doc.value = "ten" | ||||||
|  |         with pytest.raises(ValidationError): | ||||||
|  |             doc.validate() | ||||||
|  |  | ||||||
|     def test_long_ne_operator(self): |     def test_long_ne_operator(self): | ||||||
|         class TestDocument(Document): |         class TestDocument(Document): | ||||||
| @@ -53,4 +54,4 @@ class TestLongField(MongoDBTestCase): | |||||||
|         TestDocument(long_fld=None).save() |         TestDocument(long_fld=None).save() | ||||||
|         TestDocument(long_fld=1).save() |         TestDocument(long_fld=1).save() | ||||||
|  |  | ||||||
|         self.assertEqual(1, TestDocument.objects(long_fld__ne=None).count()) |         assert 1 == TestDocument.objects(long_fld__ne=None).count() | ||||||
|   | |||||||
| @@ -1,29 +1,31 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| import datetime | import datetime | ||||||
|  |  | ||||||
| from mongoengine import * | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
| from tests.utils import MongoDBTestCase | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestMapField(MongoDBTestCase): | class TestMapField(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def test_mapfield(self): |     def test_mapfield(self): | ||||||
|         """Ensure that the MapField handles the declared type.""" |         """Ensure that the MapField handles the declared type.""" | ||||||
|  |  | ||||||
|         class Simple(Document): |         class Simple(Document): | ||||||
|             mapping = MapField(IntField()) |             mapping = MapField(IntField()) | ||||||
|  |  | ||||||
|         Simple.drop_collection() |         Simple.drop_collection() | ||||||
|  |  | ||||||
|         e = Simple() |         e = Simple() | ||||||
|         e.mapping['someint'] = 1 |         e.mapping["someint"] = 1 | ||||||
|         e.save() |         e.save() | ||||||
|  |  | ||||||
|         with self.assertRaises(ValidationError): |         with pytest.raises(ValidationError): | ||||||
|             e.mapping['somestring'] = "abc" |             e.mapping["somestring"] = "abc" | ||||||
|             e.save() |             e.save() | ||||||
|  |  | ||||||
|         with self.assertRaises(ValidationError): |         with pytest.raises(ValidationError): | ||||||
|  |  | ||||||
|             class NoDeclaredType(Document): |             class NoDeclaredType(Document): | ||||||
|                 mapping = MapField() |                 mapping = MapField() | ||||||
|  |  | ||||||
| @@ -45,38 +47,37 @@ class TestMapField(MongoDBTestCase): | |||||||
|         Extensible.drop_collection() |         Extensible.drop_collection() | ||||||
|  |  | ||||||
|         e = Extensible() |         e = Extensible() | ||||||
|         e.mapping['somestring'] = StringSetting(value='foo') |         e.mapping["somestring"] = StringSetting(value="foo") | ||||||
|         e.mapping['someint'] = IntegerSetting(value=42) |         e.mapping["someint"] = IntegerSetting(value=42) | ||||||
|         e.save() |         e.save() | ||||||
|  |  | ||||||
|         e2 = Extensible.objects.get(id=e.id) |         e2 = Extensible.objects.get(id=e.id) | ||||||
|         self.assertIsInstance(e2.mapping['somestring'], StringSetting) |         assert isinstance(e2.mapping["somestring"], StringSetting) | ||||||
|         self.assertIsInstance(e2.mapping['someint'], IntegerSetting) |         assert isinstance(e2.mapping["someint"], IntegerSetting) | ||||||
|  |  | ||||||
|         with self.assertRaises(ValidationError): |         with pytest.raises(ValidationError): | ||||||
|             e.mapping['someint'] = 123 |             e.mapping["someint"] = 123 | ||||||
|             e.save() |             e.save() | ||||||
|  |  | ||||||
|     def test_embedded_mapfield_db_field(self): |     def test_embedded_mapfield_db_field(self): | ||||||
|         class Embedded(EmbeddedDocument): |         class Embedded(EmbeddedDocument): | ||||||
|             number = IntField(default=0, db_field='i') |             number = IntField(default=0, db_field="i") | ||||||
|  |  | ||||||
|         class Test(Document): |         class Test(Document): | ||||||
|             my_map = MapField(field=EmbeddedDocumentField(Embedded), |             my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field="x") | ||||||
|                               db_field='x') |  | ||||||
|  |  | ||||||
|         Test.drop_collection() |         Test.drop_collection() | ||||||
|  |  | ||||||
|         test = Test() |         test = Test() | ||||||
|         test.my_map['DICTIONARY_KEY'] = Embedded(number=1) |         test.my_map["DICTIONARY_KEY"] = Embedded(number=1) | ||||||
|         test.save() |         test.save() | ||||||
|  |  | ||||||
|         Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1) |         Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1) | ||||||
|  |  | ||||||
|         test = Test.objects.get() |         test = Test.objects.get() | ||||||
|         self.assertEqual(test.my_map['DICTIONARY_KEY'].number, 2) |         assert test.my_map["DICTIONARY_KEY"].number == 2 | ||||||
|         doc = self.db.test.find_one() |         doc = self.db.test.find_one() | ||||||
|         self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2) |         assert doc["x"]["DICTIONARY_KEY"]["i"] == 2 | ||||||
|  |  | ||||||
|     def test_mapfield_numerical_index(self): |     def test_mapfield_numerical_index(self): | ||||||
|         """Ensure that MapField accept numeric strings as indexes.""" |         """Ensure that MapField accept numeric strings as indexes.""" | ||||||
| @@ -90,9 +91,9 @@ class TestMapField(MongoDBTestCase): | |||||||
|         Test.drop_collection() |         Test.drop_collection() | ||||||
|  |  | ||||||
|         test = Test() |         test = Test() | ||||||
|         test.my_map['1'] = Embedded(name='test') |         test.my_map["1"] = Embedded(name="test") | ||||||
|         test.save() |         test.save() | ||||||
|         test.my_map['1'].name = 'test updated' |         test.my_map["1"].name = "test updated" | ||||||
|         test.save() |         test.save() | ||||||
|  |  | ||||||
|     def test_map_field_lookup(self): |     def test_map_field_lookup(self): | ||||||
| @@ -110,15 +111,20 @@ class TestMapField(MongoDBTestCase): | |||||||
|             actions = MapField(EmbeddedDocumentField(Action)) |             actions = MapField(EmbeddedDocumentField(Action)) | ||||||
|  |  | ||||||
|         Log.drop_collection() |         Log.drop_collection() | ||||||
|         Log(name="wilson", visited={'friends': datetime.datetime.now()}, |         Log( | ||||||
|             actions={'friends': Action(operation='drink', object='beer')}).save() |             name="wilson", | ||||||
|  |             visited={"friends": datetime.datetime.now()}, | ||||||
|  |             actions={"friends": Action(operation="drink", object="beer")}, | ||||||
|  |         ).save() | ||||||
|  |  | ||||||
|         self.assertEqual(1, Log.objects( |         assert 1 == Log.objects(visited__friends__exists=True).count() | ||||||
|             visited__friends__exists=True).count()) |  | ||||||
|  |  | ||||||
|         self.assertEqual(1, Log.objects( |         assert ( | ||||||
|             actions__friends__operation='drink', |             1 | ||||||
|             actions__friends__object='beer').count()) |             == Log.objects( | ||||||
|  |                 actions__friends__operation="drink", actions__friends__object="beer" | ||||||
|  |             ).count() | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     def test_map_field_unicode(self): |     def test_map_field_unicode(self): | ||||||
|         class Info(EmbeddedDocument): |         class Info(EmbeddedDocument): | ||||||
| @@ -130,15 +136,11 @@ class TestMapField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|         tree = BlogPost(info_dict={ |         tree = BlogPost(info_dict={u"éééé": {"description": u"VALUE: éééé"}}) | ||||||
|             u"éééé": { |  | ||||||
|                 'description': u"VALUE: éééé" |  | ||||||
|             } |  | ||||||
|         }) |  | ||||||
|  |  | ||||||
|         tree.save() |         tree.save() | ||||||
|  |  | ||||||
|         self.assertEqual( |         assert ( | ||||||
|             BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description, |             BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description | ||||||
|             u"VALUE: éééé" |             == u"VALUE: éééé" | ||||||
|         ) |         ) | ||||||
|   | |||||||
| @@ -1,8 +1,8 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| from bson import SON, DBRef | from bson import DBRef, SON | ||||||
|  | import pytest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -24,19 +24,22 @@ class TestReferenceField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         # Make sure ReferenceField only accepts a document class or a string |         # Make sure ReferenceField only accepts a document class or a string | ||||||
|         # with a document class name. |         # with a document class name. | ||||||
|         self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument) |         with pytest.raises(ValidationError): | ||||||
|  |             ReferenceField(EmbeddedDocument) | ||||||
|  |  | ||||||
|         user = User(name='Test User') |         user = User(name="Test User") | ||||||
|  |  | ||||||
|         # Ensure that the referenced object must have been saved |         # Ensure that the referenced object must have been saved | ||||||
|         post1 = BlogPost(content='Chips and gravy taste good.') |         post1 = BlogPost(content="Chips and gravy taste good.") | ||||||
|         post1.author = user |         post1.author = user | ||||||
|         self.assertRaises(ValidationError, post1.save) |         with pytest.raises(ValidationError): | ||||||
|  |             post1.save() | ||||||
|  |  | ||||||
|         # Check that an invalid object type cannot be used |         # Check that an invalid object type cannot be used | ||||||
|         post2 = BlogPost(content='Chips and chilli taste good.') |         post2 = BlogPost(content="Chips and chilli taste good.") | ||||||
|         post1.author = post2 |         post1.author = post2 | ||||||
|         self.assertRaises(ValidationError, post1.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             post1.validate() | ||||||
|  |  | ||||||
|         # Ensure ObjectID's are accepted as references |         # Ensure ObjectID's are accepted as references | ||||||
|         user_object_id = user.pk |         user_object_id = user.pk | ||||||
| @@ -52,42 +55,27 @@ class TestReferenceField(MongoDBTestCase): | |||||||
|         # Make sure referencing a saved document of the *wrong* type fails |         # Make sure referencing a saved document of the *wrong* type fails | ||||||
|         post2.save() |         post2.save() | ||||||
|         post1.author = post2 |         post1.author = post2 | ||||||
|         self.assertRaises(ValidationError, post1.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             post1.validate() | ||||||
|     def test_objectid_reference_fields(self): |  | ||||||
|         """Make sure storing Object ID references works.""" |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             parent = ReferenceField('self') |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|  |  | ||||||
|         p1 = Person(name="John").save() |  | ||||||
|         Person(name="Ross", parent=p1.pk).save() |  | ||||||
|  |  | ||||||
|         p = Person.objects.get(name="Ross") |  | ||||||
|         self.assertEqual(p.parent, p1) |  | ||||||
|  |  | ||||||
|     def test_dbref_reference_fields(self): |     def test_dbref_reference_fields(self): | ||||||
|         """Make sure storing references as bson.dbref.DBRef works.""" |         """Make sure storing references as bson.dbref.DBRef works.""" | ||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             parent = ReferenceField('self', dbref=True) |             parent = ReferenceField("self", dbref=True) | ||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         p1 = Person(name="John").save() |         p1 = Person(name="John").save() | ||||||
|         Person(name="Ross", parent=p1).save() |         Person(name="Ross", parent=p1).save() | ||||||
|  |  | ||||||
|         self.assertEqual( |         assert Person._get_collection().find_one({"name": "Ross"})["parent"] == DBRef( | ||||||
|             Person._get_collection().find_one({'name': 'Ross'})['parent'], |             "person", p1.pk | ||||||
|             DBRef('person', p1.pk) |  | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         p = Person.objects.get(name="Ross") |         p = Person.objects.get(name="Ross") | ||||||
|         self.assertEqual(p.parent, p1) |         assert p.parent == p1 | ||||||
|  |  | ||||||
|     def test_dbref_to_mongo(self): |     def test_dbref_to_mongo(self): | ||||||
|         """Make sure that calling to_mongo on a ReferenceField which |         """Make sure that calling to_mongo on a ReferenceField which | ||||||
| @@ -97,21 +85,15 @@ class TestReferenceField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             parent = ReferenceField('self', dbref=False) |             parent = ReferenceField("self", dbref=False) | ||||||
|  |  | ||||||
|         p = Person( |         p = Person(name="Steve", parent=DBRef("person", "abcdefghijklmnop")) | ||||||
|             name='Steve', |         assert p.to_mongo() == SON([("name", u"Steve"), ("parent", "abcdefghijklmnop")]) | ||||||
|             parent=DBRef('person', 'abcdefghijklmnop') |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(p.to_mongo(), SON([ |  | ||||||
|             ('name', u'Steve'), |  | ||||||
|             ('parent', 'abcdefghijklmnop') |  | ||||||
|         ])) |  | ||||||
|  |  | ||||||
|     def test_objectid_reference_fields(self): |     def test_objectid_reference_fields(self): | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             parent = ReferenceField('self', dbref=False) |             parent = ReferenceField("self", dbref=False) | ||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
| @@ -119,18 +101,19 @@ class TestReferenceField(MongoDBTestCase): | |||||||
|         Person(name="Ross", parent=p1).save() |         Person(name="Ross", parent=p1).save() | ||||||
|  |  | ||||||
|         col = Person._get_collection() |         col = Person._get_collection() | ||||||
|         data = col.find_one({'name': 'Ross'}) |         data = col.find_one({"name": "Ross"}) | ||||||
|         self.assertEqual(data['parent'], p1.pk) |         assert data["parent"] == p1.pk | ||||||
|  |  | ||||||
|         p = Person.objects.get(name="Ross") |         p = Person.objects.get(name="Ross") | ||||||
|         self.assertEqual(p.parent, p1) |         assert p.parent == p1 | ||||||
|  |  | ||||||
|     def test_undefined_reference(self): |     def test_undefined_reference(self): | ||||||
|         """Ensure that ReferenceFields may reference undefined Documents. |         """Ensure that ReferenceFields may reference undefined Documents. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Product(Document): |         class Product(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             company = ReferenceField('Company') |             company = ReferenceField("Company") | ||||||
|  |  | ||||||
|         class Company(Document): |         class Company(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
| @@ -138,28 +121,29 @@ class TestReferenceField(MongoDBTestCase): | |||||||
|         Product.drop_collection() |         Product.drop_collection() | ||||||
|         Company.drop_collection() |         Company.drop_collection() | ||||||
|  |  | ||||||
|         ten_gen = Company(name='10gen') |         ten_gen = Company(name="10gen") | ||||||
|         ten_gen.save() |         ten_gen.save() | ||||||
|         mongodb = Product(name='MongoDB', company=ten_gen) |         mongodb = Product(name="MongoDB", company=ten_gen) | ||||||
|         mongodb.save() |         mongodb.save() | ||||||
|  |  | ||||||
|         me = Product(name='MongoEngine') |         me = Product(name="MongoEngine") | ||||||
|         me.save() |         me.save() | ||||||
|  |  | ||||||
|         obj = Product.objects(company=ten_gen).first() |         obj = Product.objects(company=ten_gen).first() | ||||||
|         self.assertEqual(obj, mongodb) |         assert obj == mongodb | ||||||
|         self.assertEqual(obj.company, ten_gen) |         assert obj.company == ten_gen | ||||||
|  |  | ||||||
|         obj = Product.objects(company=None).first() |         obj = Product.objects(company=None).first() | ||||||
|         self.assertEqual(obj, me) |         assert obj == me | ||||||
|  |  | ||||||
|         obj = Product.objects.get(company=None) |         obj = Product.objects.get(company=None) | ||||||
|         self.assertEqual(obj, me) |         assert obj == me | ||||||
|  |  | ||||||
|     def test_reference_query_conversion(self): |     def test_reference_query_conversion(self): | ||||||
|         """Ensure that ReferenceFields can be queried using objects and values |         """Ensure that ReferenceFields can be queried using objects and values | ||||||
|         of the type of the primary key of the referenced object. |         of the type of the primary key of the referenced object. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Member(Document): |         class Member(Document): | ||||||
|             user_num = IntField(primary_key=True) |             user_num = IntField(primary_key=True) | ||||||
|  |  | ||||||
| @@ -175,22 +159,23 @@ class TestReferenceField(MongoDBTestCase): | |||||||
|         m2 = Member(user_num=2) |         m2 = Member(user_num=2) | ||||||
|         m2.save() |         m2.save() | ||||||
|  |  | ||||||
|         post1 = BlogPost(title='post 1', author=m1) |         post1 = BlogPost(title="post 1", author=m1) | ||||||
|         post1.save() |         post1.save() | ||||||
|  |  | ||||||
|         post2 = BlogPost(title='post 2', author=m2) |         post2 = BlogPost(title="post 2", author=m2) | ||||||
|         post2.save() |         post2.save() | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m1).first() |         post = BlogPost.objects(author=m1).first() | ||||||
|         self.assertEqual(post.id, post1.id) |         assert post.id == post1.id | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m2).first() |         post = BlogPost.objects(author=m2).first() | ||||||
|         self.assertEqual(post.id, post2.id) |         assert post.id == post2.id | ||||||
|  |  | ||||||
|     def test_reference_query_conversion_dbref(self): |     def test_reference_query_conversion_dbref(self): | ||||||
|         """Ensure that ReferenceFields can be queried using objects and values |         """Ensure that ReferenceFields can be queried using objects and values | ||||||
|         of the type of the primary key of the referenced object. |         of the type of the primary key of the referenced object. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Member(Document): |         class Member(Document): | ||||||
|             user_num = IntField(primary_key=True) |             user_num = IntField(primary_key=True) | ||||||
|  |  | ||||||
| @@ -206,14 +191,14 @@ class TestReferenceField(MongoDBTestCase): | |||||||
|         m2 = Member(user_num=2) |         m2 = Member(user_num=2) | ||||||
|         m2.save() |         m2.save() | ||||||
|  |  | ||||||
|         post1 = BlogPost(title='post 1', author=m1) |         post1 = BlogPost(title="post 1", author=m1) | ||||||
|         post1.save() |         post1.save() | ||||||
|  |  | ||||||
|         post2 = BlogPost(title='post 2', author=m2) |         post2 = BlogPost(title="post 2", author=m2) | ||||||
|         post2.save() |         post2.save() | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m1).first() |         post = BlogPost.objects(author=m1).first() | ||||||
|         self.assertEqual(post.id, post1.id) |         assert post.id == post1.id | ||||||
|  |  | ||||||
|         post = BlogPost.objects(author=m2).first() |         post = BlogPost.objects(author=m2).first() | ||||||
|         self.assertEqual(post.id, post2.id) |         assert post.id == post2.id | ||||||
|   | |||||||
| @@ -11,79 +11,79 @@ class TestSequenceField(MongoDBTestCase): | |||||||
|             id = SequenceField(primary_key=True) |             id = SequenceField(primary_key=True) | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         self.db['mongoengine.counters'].drop() |         self.db["mongoengine.counters"].drop() | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         for x in range(10): |         for x in range(10): | ||||||
|             Person(name="Person %s" % x).save() |             Person(name="Person %s" % x).save() | ||||||
|  |  | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) | ||||||
|         self.assertEqual(c['next'], 10) |         assert c["next"] == 10 | ||||||
|  |  | ||||||
|         ids = [i.id for i in Person.objects] |         ids = [i.id for i in Person.objects] | ||||||
|         self.assertEqual(ids, range(1, 11)) |         assert ids == list(range(1, 11)) | ||||||
|  |  | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) | ||||||
|         self.assertEqual(c['next'], 10) |         assert c["next"] == 10 | ||||||
|  |  | ||||||
|         Person.id.set_next_value(1000) |         Person.id.set_next_value(1000) | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) | ||||||
|         self.assertEqual(c['next'], 1000) |         assert c["next"] == 1000 | ||||||
|  |  | ||||||
|     def test_sequence_field_get_next_value(self): |     def test_sequence_field_get_next_value(self): | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             id = SequenceField(primary_key=True) |             id = SequenceField(primary_key=True) | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         self.db['mongoengine.counters'].drop() |         self.db["mongoengine.counters"].drop() | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         for x in range(10): |         for x in range(10): | ||||||
|             Person(name="Person %s" % x).save() |             Person(name="Person %s" % x).save() | ||||||
|  |  | ||||||
|         self.assertEqual(Person.id.get_next_value(), 11) |         assert Person.id.get_next_value() == 11 | ||||||
|         self.db['mongoengine.counters'].drop() |         self.db["mongoengine.counters"].drop() | ||||||
|  |  | ||||||
|         self.assertEqual(Person.id.get_next_value(), 1) |         assert Person.id.get_next_value() == 1 | ||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             id = SequenceField(primary_key=True, value_decorator=str) |             id = SequenceField(primary_key=True, value_decorator=str) | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         self.db['mongoengine.counters'].drop() |         self.db["mongoengine.counters"].drop() | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         for x in range(10): |         for x in range(10): | ||||||
|             Person(name="Person %s" % x).save() |             Person(name="Person %s" % x).save() | ||||||
|  |  | ||||||
|         self.assertEqual(Person.id.get_next_value(), '11') |         assert Person.id.get_next_value() == "11" | ||||||
|         self.db['mongoengine.counters'].drop() |         self.db["mongoengine.counters"].drop() | ||||||
|  |  | ||||||
|         self.assertEqual(Person.id.get_next_value(), '1') |         assert Person.id.get_next_value() == "1" | ||||||
|  |  | ||||||
|     def test_sequence_field_sequence_name(self): |     def test_sequence_field_sequence_name(self): | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             id = SequenceField(primary_key=True, sequence_name='jelly') |             id = SequenceField(primary_key=True, sequence_name="jelly") | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         self.db['mongoengine.counters'].drop() |         self.db["mongoengine.counters"].drop() | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         for x in range(10): |         for x in range(10): | ||||||
|             Person(name="Person %s" % x).save() |             Person(name="Person %s" % x).save() | ||||||
|  |  | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"}) | ||||||
|         self.assertEqual(c['next'], 10) |         assert c["next"] == 10 | ||||||
|  |  | ||||||
|         ids = [i.id for i in Person.objects] |         ids = [i.id for i in Person.objects] | ||||||
|         self.assertEqual(ids, range(1, 11)) |         assert ids == list(range(1, 11)) | ||||||
|  |  | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"}) | ||||||
|         self.assertEqual(c['next'], 10) |         assert c["next"] == 10 | ||||||
|  |  | ||||||
|         Person.id.set_next_value(1000) |         Person.id.set_next_value(1000) | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"}) | ||||||
|         self.assertEqual(c['next'], 1000) |         assert c["next"] == 1000 | ||||||
|  |  | ||||||
|     def test_multiple_sequence_fields(self): |     def test_multiple_sequence_fields(self): | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
| @@ -91,56 +91,56 @@ class TestSequenceField(MongoDBTestCase): | |||||||
|             counter = SequenceField() |             counter = SequenceField() | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         self.db['mongoengine.counters'].drop() |         self.db["mongoengine.counters"].drop() | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         for x in range(10): |         for x in range(10): | ||||||
|             Person(name="Person %s" % x).save() |             Person(name="Person %s" % x).save() | ||||||
|  |  | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) | ||||||
|         self.assertEqual(c['next'], 10) |         assert c["next"] == 10 | ||||||
|  |  | ||||||
|         ids = [i.id for i in Person.objects] |         ids = [i.id for i in Person.objects] | ||||||
|         self.assertEqual(ids, range(1, 11)) |         assert ids == list(range(1, 11)) | ||||||
|  |  | ||||||
|         counters = [i.counter for i in Person.objects] |         counters = [i.counter for i in Person.objects] | ||||||
|         self.assertEqual(counters, range(1, 11)) |         assert counters == list(range(1, 11)) | ||||||
|  |  | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) | ||||||
|         self.assertEqual(c['next'], 10) |         assert c["next"] == 10 | ||||||
|  |  | ||||||
|         Person.id.set_next_value(1000) |         Person.id.set_next_value(1000) | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) | ||||||
|         self.assertEqual(c['next'], 1000) |         assert c["next"] == 1000 | ||||||
|  |  | ||||||
|         Person.counter.set_next_value(999) |         Person.counter.set_next_value(999) | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'person.counter'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "person.counter"}) | ||||||
|         self.assertEqual(c['next'], 999) |         assert c["next"] == 999 | ||||||
|  |  | ||||||
|     def test_sequence_fields_reload(self): |     def test_sequence_fields_reload(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
|             counter = SequenceField() |             counter = SequenceField() | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         self.db['mongoengine.counters'].drop() |         self.db["mongoengine.counters"].drop() | ||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|  |  | ||||||
|         a = Animal(name="Boi").save() |         a = Animal(name="Boi").save() | ||||||
|  |  | ||||||
|         self.assertEqual(a.counter, 1) |         assert a.counter == 1 | ||||||
|         a.reload() |         a.reload() | ||||||
|         self.assertEqual(a.counter, 1) |         assert a.counter == 1 | ||||||
|  |  | ||||||
|         a.counter = None |         a.counter = None | ||||||
|         self.assertEqual(a.counter, 2) |         assert a.counter == 2 | ||||||
|         a.save() |         a.save() | ||||||
|  |  | ||||||
|         self.assertEqual(a.counter, 2) |         assert a.counter == 2 | ||||||
|  |  | ||||||
|         a = Animal.objects.first() |         a = Animal.objects.first() | ||||||
|         self.assertEqual(a.counter, 2) |         assert a.counter == 2 | ||||||
|         a.reload() |         a.reload() | ||||||
|         self.assertEqual(a.counter, 2) |         assert a.counter == 2 | ||||||
|  |  | ||||||
|     def test_multiple_sequence_fields_on_docs(self): |     def test_multiple_sequence_fields_on_docs(self): | ||||||
|         class Animal(Document): |         class Animal(Document): | ||||||
| @@ -151,7 +151,7 @@ class TestSequenceField(MongoDBTestCase): | |||||||
|             id = SequenceField(primary_key=True) |             id = SequenceField(primary_key=True) | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         self.db['mongoengine.counters'].drop() |         self.db["mongoengine.counters"].drop() | ||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
| @@ -159,44 +159,44 @@ class TestSequenceField(MongoDBTestCase): | |||||||
|             Animal(name="Animal %s" % x).save() |             Animal(name="Animal %s" % x).save() | ||||||
|             Person(name="Person %s" % x).save() |             Person(name="Person %s" % x).save() | ||||||
|  |  | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) | ||||||
|         self.assertEqual(c['next'], 10) |         assert c["next"] == 10 | ||||||
|  |  | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"}) | ||||||
|         self.assertEqual(c['next'], 10) |         assert c["next"] == 10 | ||||||
|  |  | ||||||
|         ids = [i.id for i in Person.objects] |         ids = [i.id for i in Person.objects] | ||||||
|         self.assertEqual(ids, range(1, 11)) |         assert ids == list(range(1, 11)) | ||||||
|  |  | ||||||
|         id = [i.id for i in Animal.objects] |         id = [i.id for i in Animal.objects] | ||||||
|         self.assertEqual(id, range(1, 11)) |         assert id == list(range(1, 11)) | ||||||
|  |  | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) | ||||||
|         self.assertEqual(c['next'], 10) |         assert c["next"] == 10 | ||||||
|  |  | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"}) | ||||||
|         self.assertEqual(c['next'], 10) |         assert c["next"] == 10 | ||||||
|  |  | ||||||
|     def test_sequence_field_value_decorator(self): |     def test_sequence_field_value_decorator(self): | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             id = SequenceField(primary_key=True, value_decorator=str) |             id = SequenceField(primary_key=True, value_decorator=str) | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         self.db['mongoengine.counters'].drop() |         self.db["mongoengine.counters"].drop() | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         for x in range(10): |         for x in range(10): | ||||||
|             p = Person(name="Person %s" % x) |             p = Person(name="Person %s" % x) | ||||||
|             p.save() |             p.save() | ||||||
|  |  | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) | ||||||
|         self.assertEqual(c['next'], 10) |         assert c["next"] == 10 | ||||||
|  |  | ||||||
|         ids = [i.id for i in Person.objects] |         ids = [i.id for i in Person.objects] | ||||||
|         self.assertEqual(ids, map(str, range(1, 11))) |         assert ids == [str(i) for i in range(1, 11)] | ||||||
|  |  | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) |         c = self.db["mongoengine.counters"].find_one({"_id": "person.id"}) | ||||||
|         self.assertEqual(c['next'], 10) |         assert c["next"] == 10 | ||||||
|  |  | ||||||
|     def test_embedded_sequence_field(self): |     def test_embedded_sequence_field(self): | ||||||
|         class Comment(EmbeddedDocument): |         class Comment(EmbeddedDocument): | ||||||
| @@ -207,23 +207,27 @@ class TestSequenceField(MongoDBTestCase): | |||||||
|             title = StringField(required=True) |             title = StringField(required=True) | ||||||
|             comments = ListField(EmbeddedDocumentField(Comment)) |             comments = ListField(EmbeddedDocumentField(Comment)) | ||||||
|  |  | ||||||
|         self.db['mongoengine.counters'].drop() |         self.db["mongoengine.counters"].drop() | ||||||
|         Post.drop_collection() |         Post.drop_collection() | ||||||
|  |  | ||||||
|         Post(title="MongoEngine", |         Post( | ||||||
|              comments=[Comment(content="NoSQL Rocks"), |             title="MongoEngine", | ||||||
|                        Comment(content="MongoEngine Rocks")]).save() |             comments=[ | ||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'comment.id'}) |                 Comment(content="NoSQL Rocks"), | ||||||
|         self.assertEqual(c['next'], 2) |                 Comment(content="MongoEngine Rocks"), | ||||||
|  |             ], | ||||||
|  |         ).save() | ||||||
|  |         c = self.db["mongoengine.counters"].find_one({"_id": "comment.id"}) | ||||||
|  |         assert c["next"] == 2 | ||||||
|         post = Post.objects.first() |         post = Post.objects.first() | ||||||
|         self.assertEqual(1, post.comments[0].id) |         assert 1 == post.comments[0].id | ||||||
|         self.assertEqual(2, post.comments[1].id) |         assert 2 == post.comments[1].id | ||||||
|  |  | ||||||
|     def test_inherited_sequencefield(self): |     def test_inherited_sequencefield(self): | ||||||
|         class Base(Document): |         class Base(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             counter = SequenceField() |             counter = SequenceField() | ||||||
|             meta = {'abstract': True} |             meta = {"abstract": True} | ||||||
|  |  | ||||||
|         class Foo(Base): |         class Foo(Base): | ||||||
|             pass |             pass | ||||||
| @@ -231,24 +235,25 @@ class TestSequenceField(MongoDBTestCase): | |||||||
|         class Bar(Base): |         class Bar(Base): | ||||||
|             pass |             pass | ||||||
|  |  | ||||||
|         bar = Bar(name='Bar') |         bar = Bar(name="Bar") | ||||||
|         bar.save() |         bar.save() | ||||||
|  |  | ||||||
|         foo = Foo(name='Foo') |         foo = Foo(name="Foo") | ||||||
|         foo.save() |         foo.save() | ||||||
|  |  | ||||||
|         self.assertTrue('base.counter' in |         assert "base.counter" in self.db["mongoengine.counters"].find().distinct("_id") | ||||||
|                         self.db['mongoengine.counters'].find().distinct('_id')) |         assert not ( | ||||||
|         self.assertFalse(('foo.counter' or 'bar.counter') in |             ("foo.counter" or "bar.counter") | ||||||
|                          self.db['mongoengine.counters'].find().distinct('_id')) |             in self.db["mongoengine.counters"].find().distinct("_id") | ||||||
|         self.assertNotEqual(foo.counter, bar.counter) |         ) | ||||||
|         self.assertEqual(foo._fields['counter'].owner_document, Base) |         assert foo.counter != bar.counter | ||||||
|         self.assertEqual(bar._fields['counter'].owner_document, Base) |         assert foo._fields["counter"].owner_document == Base | ||||||
|  |         assert bar._fields["counter"].owner_document == Base | ||||||
|  |  | ||||||
|     def test_no_inherited_sequencefield(self): |     def test_no_inherited_sequencefield(self): | ||||||
|         class Base(Document): |         class Base(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             meta = {'abstract': True} |             meta = {"abstract": True} | ||||||
|  |  | ||||||
|         class Foo(Base): |         class Foo(Base): | ||||||
|             counter = SequenceField() |             counter = SequenceField() | ||||||
| @@ -256,16 +261,18 @@ class TestSequenceField(MongoDBTestCase): | |||||||
|         class Bar(Base): |         class Bar(Base): | ||||||
|             counter = SequenceField() |             counter = SequenceField() | ||||||
|  |  | ||||||
|         bar = Bar(name='Bar') |         bar = Bar(name="Bar") | ||||||
|         bar.save() |         bar.save() | ||||||
|  |  | ||||||
|         foo = Foo(name='Foo') |         foo = Foo(name="Foo") | ||||||
|         foo.save() |         foo.save() | ||||||
|  |  | ||||||
|         self.assertFalse('base.counter' in |         assert "base.counter" not in self.db["mongoengine.counters"].find().distinct( | ||||||
|                          self.db['mongoengine.counters'].find().distinct('_id')) |             "_id" | ||||||
|         self.assertTrue(('foo.counter' and 'bar.counter') in |         ) | ||||||
|                          self.db['mongoengine.counters'].find().distinct('_id')) |         existing_counters = self.db["mongoengine.counters"].find().distinct("_id") | ||||||
|         self.assertEqual(foo.counter, bar.counter) |         assert "foo.counter" in existing_counters | ||||||
|         self.assertEqual(foo._fields['counter'].owner_document, Foo) |         assert "bar.counter" in existing_counters | ||||||
|         self.assertEqual(bar._fields['counter'].owner_document, Bar) |         assert foo.counter == bar.counter | ||||||
|  |         assert foo._fields["counter"].owner_document == Foo | ||||||
|  |         assert bar._fields["counter"].owner_document == Bar | ||||||
|   | |||||||
| @@ -1,53 +1,60 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| from mongoengine import * | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
| from tests.utils import MongoDBTestCase | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestURLField(MongoDBTestCase): | class TestURLField(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def test_validation(self): |     def test_validation(self): | ||||||
|         """Ensure that URLFields validate urls properly.""" |         """Ensure that URLFields validate urls properly.""" | ||||||
|  |  | ||||||
|         class Link(Document): |         class Link(Document): | ||||||
|             url = URLField() |             url = URLField() | ||||||
|  |  | ||||||
|         link = Link() |         link = Link() | ||||||
|         link.url = 'google' |         link.url = "google" | ||||||
|         self.assertRaises(ValidationError, link.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             link.validate() | ||||||
|  |  | ||||||
|         link.url = 'http://www.google.com:8080' |         link.url = "http://www.google.com:8080" | ||||||
|         link.validate() |         link.validate() | ||||||
|  |  | ||||||
|     def test_unicode_url_validation(self): |     def test_unicode_url_validation(self): | ||||||
|         """Ensure unicode URLs are validated properly.""" |         """Ensure unicode URLs are validated properly.""" | ||||||
|  |  | ||||||
|         class Link(Document): |         class Link(Document): | ||||||
|             url = URLField() |             url = URLField() | ||||||
|  |  | ||||||
|         link = Link() |         link = Link() | ||||||
|         link.url = u'http://привет.com' |         link.url = u"http://привет.com" | ||||||
|  |  | ||||||
|         # TODO fix URL validation - this *IS* a valid URL |         # TODO fix URL validation - this *IS* a valid URL | ||||||
|         # For now we just want to make sure that the error message is correct |         # For now we just want to make sure that the error message is correct | ||||||
|         with self.assertRaises(ValidationError) as ctx_err: |         with pytest.raises(ValidationError) as exc_info: | ||||||
|             link.validate() |             link.validate() | ||||||
|         self.assertEqual(unicode(ctx_err.exception), |         assert ( | ||||||
|                          u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])") |             str(exc_info.value) | ||||||
|  |             == u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])" | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     def test_url_scheme_validation(self): |     def test_url_scheme_validation(self): | ||||||
|         """Ensure that URLFields validate urls with specific schemes properly. |         """Ensure that URLFields validate urls with specific schemes properly. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Link(Document): |         class Link(Document): | ||||||
|             url = URLField() |             url = URLField() | ||||||
|  |  | ||||||
|         class SchemeLink(Document): |         class SchemeLink(Document): | ||||||
|             url = URLField(schemes=['ws', 'irc']) |             url = URLField(schemes=["ws", "irc"]) | ||||||
|  |  | ||||||
|         link = Link() |         link = Link() | ||||||
|         link.url = 'ws://google.com' |         link.url = "ws://google.com" | ||||||
|         self.assertRaises(ValidationError, link.validate) |         with pytest.raises(ValidationError): | ||||||
|  |             link.validate() | ||||||
|  |  | ||||||
|         scheme_link = SchemeLink() |         scheme_link = SchemeLink() | ||||||
|         scheme_link.url = 'ws://google.com' |         scheme_link.url = "ws://google.com" | ||||||
|         scheme_link.validate() |         scheme_link.validate() | ||||||
|  |  | ||||||
|     def test_underscore_allowed_in_domains_names(self): |     def test_underscore_allowed_in_domains_names(self): | ||||||
| @@ -55,5 +62,5 @@ class TestURLField(MongoDBTestCase): | |||||||
|             url = URLField() |             url = URLField() | ||||||
|  |  | ||||||
|         link = Link() |         link = Link() | ||||||
|         link.url = 'https://san_leandro-ca.geebo.com' |         link.url = "https://san_leandro-ca.geebo.com" | ||||||
|         link.validate() |         link.validate() | ||||||
|   | |||||||
| @@ -1,8 +1,9 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| import uuid | import uuid | ||||||
|  |  | ||||||
| from mongoengine import * | import pytest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
| from tests.utils import MongoDBTestCase, get_as_pymongo | from tests.utils import MongoDBTestCase, get_as_pymongo | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -14,12 +15,7 @@ class TestUUIDField(MongoDBTestCase): | |||||||
|     def test_storage(self): |     def test_storage(self): | ||||||
|         uid = uuid.uuid4() |         uid = uuid.uuid4() | ||||||
|         person = Person(api_key=uid).save() |         person = Person(api_key=uid).save() | ||||||
|         self.assertEqual( |         assert get_as_pymongo(person) == {"_id": person.id, "api_key": str(uid)} | ||||||
|             get_as_pymongo(person), |  | ||||||
|             {'_id': person.id, |  | ||||||
|              'api_key': str(uid) |  | ||||||
|              } |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|     def test_field_string(self): |     def test_field_string(self): | ||||||
|         """Test UUID fields storing as String |         """Test UUID fields storing as String | ||||||
| @@ -28,8 +24,8 @@ class TestUUIDField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         uu = uuid.uuid4() |         uu = uuid.uuid4() | ||||||
|         Person(api_key=uu).save() |         Person(api_key=uu).save() | ||||||
|         self.assertEqual(1, Person.objects(api_key=uu).count()) |         assert 1 == Person.objects(api_key=uu).count() | ||||||
|         self.assertEqual(uu, Person.objects.first().api_key) |         assert uu == Person.objects.first().api_key | ||||||
|  |  | ||||||
|         person = Person() |         person = Person() | ||||||
|         valid = (uuid.uuid4(), uuid.uuid1()) |         valid = (uuid.uuid4(), uuid.uuid1()) | ||||||
| @@ -37,11 +33,14 @@ class TestUUIDField(MongoDBTestCase): | |||||||
|             person.api_key = api_key |             person.api_key = api_key | ||||||
|             person.validate() |             person.validate() | ||||||
|  |  | ||||||
|         invalid = ('9d159858-549b-4975-9f98-dd2f987c113g', |         invalid = ( | ||||||
|                    '9d159858-549b-4975-9f98-dd2f987c113') |             "9d159858-549b-4975-9f98-dd2f987c113g", | ||||||
|  |             "9d159858-549b-4975-9f98-dd2f987c113", | ||||||
|  |         ) | ||||||
|         for api_key in invalid: |         for api_key in invalid: | ||||||
|             person.api_key = api_key |             person.api_key = api_key | ||||||
|             self.assertRaises(ValidationError, person.validate) |             with pytest.raises(ValidationError): | ||||||
|  |                 person.validate() | ||||||
|  |  | ||||||
|     def test_field_binary(self): |     def test_field_binary(self): | ||||||
|         """Test UUID fields storing as Binary object.""" |         """Test UUID fields storing as Binary object.""" | ||||||
| @@ -49,8 +48,8 @@ class TestUUIDField(MongoDBTestCase): | |||||||
|  |  | ||||||
|         uu = uuid.uuid4() |         uu = uuid.uuid4() | ||||||
|         Person(api_key=uu).save() |         Person(api_key=uu).save() | ||||||
|         self.assertEqual(1, Person.objects(api_key=uu).count()) |         assert 1 == Person.objects(api_key=uu).count() | ||||||
|         self.assertEqual(uu, Person.objects.first().api_key) |         assert uu == Person.objects.first().api_key | ||||||
|  |  | ||||||
|         person = Person() |         person = Person() | ||||||
|         valid = (uuid.uuid4(), uuid.uuid1()) |         valid = (uuid.uuid4(), uuid.uuid1()) | ||||||
| @@ -58,8 +57,11 @@ class TestUUIDField(MongoDBTestCase): | |||||||
|             person.api_key = api_key |             person.api_key = api_key | ||||||
|             person.validate() |             person.validate() | ||||||
|  |  | ||||||
|         invalid = ('9d159858-549b-4975-9f98-dd2f987c113g', |         invalid = ( | ||||||
|                    '9d159858-549b-4975-9f98-dd2f987c113') |             "9d159858-549b-4975-9f98-dd2f987c113g", | ||||||
|  |             "9d159858-549b-4975-9f98-dd2f987c113", | ||||||
|  |         ) | ||||||
|         for api_key in invalid: |         for api_key in invalid: | ||||||
|             person.api_key = api_key |             person.api_key = api_key | ||||||
|             self.assertRaises(ValidationError, person.validate) |             with pytest.raises(ValidationError): | ||||||
|  |                 person.validate() | ||||||
|   | |||||||
| @@ -11,7 +11,7 @@ class PickleEmbedded(EmbeddedDocument): | |||||||
|  |  | ||||||
| class PickleTest(Document): | class PickleTest(Document): | ||||||
|     number = IntField() |     number = IntField() | ||||||
|     string = StringField(choices=(('One', '1'), ('Two', '2'))) |     string = StringField(choices=(("One", "1"), ("Two", "2"))) | ||||||
|     embedded = EmbeddedDocumentField(PickleEmbedded) |     embedded = EmbeddedDocumentField(PickleEmbedded) | ||||||
|     lists = ListField(StringField()) |     lists = ListField(StringField()) | ||||||
|     photo = FileField() |     photo = FileField() | ||||||
| @@ -19,7 +19,7 @@ class PickleTest(Document): | |||||||
|  |  | ||||||
| class NewDocumentPickleTest(Document): | class NewDocumentPickleTest(Document): | ||||||
|     number = IntField() |     number = IntField() | ||||||
|     string = StringField(choices=(('One', '1'), ('Two', '2'))) |     string = StringField(choices=(("One", "1"), ("Two", "2"))) | ||||||
|     embedded = EmbeddedDocumentField(PickleEmbedded) |     embedded = EmbeddedDocumentField(PickleEmbedded) | ||||||
|     lists = ListField(StringField()) |     lists = ListField(StringField()) | ||||||
|     photo = FileField() |     photo = FileField() | ||||||
| @@ -36,17 +36,17 @@ class PickleDynamicTest(DynamicDocument): | |||||||
|  |  | ||||||
| class PickleSignalsTest(Document): | class PickleSignalsTest(Document): | ||||||
|     number = IntField() |     number = IntField() | ||||||
|     string = StringField(choices=(('One', '1'), ('Two', '2'))) |     string = StringField(choices=(("One", "1"), ("Two", "2"))) | ||||||
|     embedded = EmbeddedDocumentField(PickleEmbedded) |     embedded = EmbeddedDocumentField(PickleEmbedded) | ||||||
|     lists = ListField(StringField()) |     lists = ListField(StringField()) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def post_save(self, sender, document, created, **kwargs): |     def post_save(self, sender, document, created, **kwargs): | ||||||
|         pickled = pickle.dumps(document) |         pickle.dumps(document) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def post_delete(self, sender, document, **kwargs): |     def post_delete(self, sender, document, **kwargs): | ||||||
|         pickled = pickle.dumps(document) |         pickle.dumps(document) | ||||||
|  |  | ||||||
|  |  | ||||||
| signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest) | signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest) | ||||||
| @@ -58,4 +58,4 @@ class Mixin(object): | |||||||
|  |  | ||||||
|  |  | ||||||
| class Base(Document): | class Base(Document): | ||||||
|     meta = {'allow_inheritance': True} |     meta = {"allow_inheritance": True} | ||||||
|   | |||||||
| @@ -1,6 +0,0 @@ | |||||||
| from .transform import * |  | ||||||
| from .field_list import * |  | ||||||
| from .queryset import * |  | ||||||
| from .visitor import * |  | ||||||
| from .geo import * |  | ||||||
| from .modify import * |  | ||||||
|   | |||||||
| @@ -1,440 +0,0 @@ | |||||||
| import unittest |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.queryset import QueryFieldList |  | ||||||
|  |  | ||||||
| __all__ = ("QueryFieldListTest", "OnlyExcludeAllTest") |  | ||||||
|  |  | ||||||
|  |  | ||||||
class QueryFieldListTest(unittest.TestCase):
    """Unit tests for QueryFieldList, which merges successive .only() /
    .exclude() projections into a single MongoDB field-projection dict.

    Uses plain ``assert`` statements for consistency with the rest of the
    (pytest-migrated) test suite.
    """

    def test_empty(self):
        """A fresh QueryFieldList is falsy, even when always_include is set."""
        q = QueryFieldList()
        assert not q

        q = QueryFieldList(always_include=["_cls"])
        assert not q

    def test_include_include(self):
        """Successive includes union their field sets."""
        q = QueryFieldList()
        q += QueryFieldList(
            fields=["a", "b"], value=QueryFieldList.ONLY, _only_called=True
        )
        assert q.as_dict() == {"a": 1, "b": 1}
        q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
        assert q.as_dict() == {"a": 1, "b": 1, "c": 1}

    def test_include_exclude(self):
        """Excluding after an include drops the excluded fields."""
        q = QueryFieldList()
        q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.ONLY)
        assert q.as_dict() == {"a": 1, "b": 1}
        q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.EXCLUDE)
        assert q.as_dict() == {"a": 1}

    def test_exclude_exclude(self):
        """Successive excludes union their field sets."""
        q = QueryFieldList()
        q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.EXCLUDE)
        assert q.as_dict() == {"a": 0, "b": 0}
        q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.EXCLUDE)
        assert q.as_dict() == {"a": 0, "b": 0, "c": 0}

    def test_exclude_include(self):
        """An include after an exclude keeps only never-excluded fields."""
        q = QueryFieldList()
        q += QueryFieldList(fields=["a", "b"], value=QueryFieldList.EXCLUDE)
        assert q.as_dict() == {"a": 0, "b": 0}
        q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
        assert q.as_dict() == {"c": 1}

    def test_always_include(self):
        """always_include fields survive both excludes and includes."""
        q = QueryFieldList(always_include=["x", "y"])
        q += QueryFieldList(fields=["a", "b", "x"], value=QueryFieldList.EXCLUDE)
        q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
        assert q.as_dict() == {"x": 1, "y": 1, "c": 1}

    def test_reset(self):
        """reset() clears accumulated state but keeps always_include."""
        q = QueryFieldList(always_include=["x", "y"])
        q += QueryFieldList(fields=["a", "b", "x"], value=QueryFieldList.EXCLUDE)
        q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
        assert q.as_dict() == {"x": 1, "y": 1, "c": 1}
        q.reset()
        assert not q
        q += QueryFieldList(fields=["b", "c"], value=QueryFieldList.ONLY)
        assert q.as_dict() == {"x": 1, "y": 1, "b": 1, "c": 1}

    def test_using_a_slice(self):
        """A $slice projection value is passed through unchanged."""
        q = QueryFieldList()
        q += QueryFieldList(fields=["a"], value={"$slice": 5})
        assert q.as_dict() == {"a": {"$slice": 5}}
|  |  | ||||||
|  |  | ||||||
class OnlyExcludeAllTest(unittest.TestCase):
    """Integration tests for QuerySet field projection: .only(), .exclude(),
    .fields() (including $slice) and .all_fields().

    NOTE(review): these tests require a running MongoDB instance reachable
    by connect() — they are integration, not unit, tests.
    """

    def setUp(self):
        # Fresh connection + a clean Person collection for every test.
        connect(db='mongoenginetest')

        class Person(Document):
            name = StringField()
            age = IntField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()
        self.Person = Person

    def test_mixing_only_exclude(self):
        """Combining .fields(), .only() and .exclude() in any order narrows
        the loaded-field projection; later calls can only restrict it."""

        class MyDoc(Document):
            a = StringField()
            b = StringField()
            c = StringField()
            d = StringField()
            e = StringField()
            f = StringField()

        include = ['a', 'b', 'c', 'd', 'e']
        exclude = ['d', 'e']
        only = ['b', 'c']

        # fields -> only -> exclude
        qs = MyDoc.objects.fields(**{i: 1 for i in include})
        self.assertEqual(qs._loaded_fields.as_dict(),
                         {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1})
        qs = qs.only(*only)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})
        qs = qs.exclude(*exclude)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})

        # fields -> exclude -> only
        qs = MyDoc.objects.fields(**{i: 1 for i in include})
        qs = qs.exclude(*exclude)
        self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
        qs = qs.only(*only)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})

        # exclude -> fields -> only
        qs = MyDoc.objects.exclude(*exclude)
        qs = qs.fields(**{i: 1 for i in include})
        self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1})
        qs = qs.only(*only)
        self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1})

    def test_slicing(self):
        """slice__<field> adds a $slice projection; it composes with prior
        only/exclude restrictions, and a later exclude removes it."""

        class MyDoc(Document):
            a = ListField()
            b = ListField()
            c = ListField()
            d = ListField()
            e = ListField()
            f = ListField()

        include = ['a', 'b', 'c', 'd', 'e']
        exclude = ['d', 'e']
        only = ['b', 'c']

        qs = MyDoc.objects.fields(**{i: 1 for i in include})
        qs = qs.exclude(*exclude)
        qs = qs.only(*only)
        qs = qs.fields(slice__b=5)
        self.assertEqual(qs._loaded_fields.as_dict(),
                         {'b': {'$slice': 5}, 'c': 1})

        qs = qs.fields(slice__c=[5, 1])
        self.assertEqual(qs._loaded_fields.as_dict(),
                         {'b': {'$slice': 5}, 'c': {'$slice': [5, 1]}})

        qs = qs.exclude('c')
        self.assertEqual(qs._loaded_fields.as_dict(),
                         {'b': {'$slice': 5}})

    def test_mix_slice_with_other_fields(self):
        """Plain include/exclude flags and a $slice can coexist in one
        .fields() call; the excluded field (b=0) drops out of the dict."""
        class MyDoc(Document):
            a = ListField()
            b = ListField()
            c = ListField()

        qs = MyDoc.objects.fields(a=1, b=0, slice__c=2)
        self.assertEqual(qs._loaded_fields.as_dict(),
                         {'c': {'$slice': 2}, 'a': 1})

    def test_only(self):
        """Ensure that QuerySet.only only returns the requested fields.
        """
        person = self.Person(name='test', age=25)
        person.save()

        obj = self.Person.objects.only('name').get()
        self.assertEqual(obj.name, person.name)
        self.assertEqual(obj.age, None)

        obj = self.Person.objects.only('age').get()
        self.assertEqual(obj.name, None)
        self.assertEqual(obj.age, person.age)

        obj = self.Person.objects.only('name', 'age').get()
        self.assertEqual(obj.name, person.name)
        self.assertEqual(obj.age, person.age)

        # .only() also accepts an unpacked iterable of field names.
        obj = self.Person.objects.only(*('id', 'name',)).get()
        self.assertEqual(obj.name, person.name)
        self.assertEqual(obj.age, None)

        # Check polymorphism still works
        class Employee(self.Person):
            salary = IntField(db_field='wage')

        employee = Employee(name='test employee', age=40, salary=30000)
        employee.save()

        obj = self.Person.objects(id=employee.id).only('age').get()
        self.assertIsInstance(obj, Employee)

        # Check field names are looked up properly
        # (salary is stored under db_field 'wage' but projected by its
        # Python attribute name here)
        obj = Employee.objects(id=employee.id).only('salary').get()
        self.assertEqual(obj.salary, employee.salary)
        self.assertEqual(obj.name, None)

    def test_only_with_subfields(self):
        """Dotted paths in .only() project into embedded documents, lists of
        embedded documents and MapField values."""
        class User(EmbeddedDocument):
            name = StringField()
            email = StringField()

        class Comment(EmbeddedDocument):
            title = StringField()
            text = StringField()

        class VariousData(EmbeddedDocument):
            some = BooleanField()

        class BlogPost(Document):
            content = StringField()
            author = EmbeddedDocumentField(User)
            comments = ListField(EmbeddedDocumentField(Comment))
            various = MapField(field=EmbeddedDocumentField(VariousData))

        BlogPost.drop_collection()

        post = BlogPost(content='Had a good coffee today...', various={'test_dynamic': {'some': True}})
        post.author = User(name='Test User')
        post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
        post.save()

        # Only the embedded sub-field is loaded; siblings come back as None.
        obj = BlogPost.objects.only('author.name',).get()
        self.assertEqual(obj.content, None)
        self.assertEqual(obj.author.email, None)
        self.assertEqual(obj.author.name, 'Test User')
        self.assertEqual(obj.comments, [])

        # Projection through a MapField key.
        obj = BlogPost.objects.only('various.test_dynamic.some').get()
        self.assertEqual(obj.various["test_dynamic"].some, True)

        # Projection into each element of an embedded-document list.
        obj = BlogPost.objects.only('content', 'comments.title',).get()
        self.assertEqual(obj.content, 'Had a good coffee today...')
        self.assertEqual(obj.author, None)
        self.assertEqual(obj.comments[0].title, 'I aggree')
        self.assertEqual(obj.comments[1].title, 'Coffee')
        self.assertEqual(obj.comments[0].text, None)
        self.assertEqual(obj.comments[1].text, None)

        # Selecting the whole list field loads full embedded documents.
        obj = BlogPost.objects.only('comments',).get()
        self.assertEqual(obj.content, None)
        self.assertEqual(obj.author, None)
        self.assertEqual(obj.comments[0].title, 'I aggree')
        self.assertEqual(obj.comments[1].title, 'Coffee')
        self.assertEqual(obj.comments[0].text, 'Great post!')
        self.assertEqual(obj.comments[1].text, 'I hate coffee')

        BlogPost.drop_collection()

    def test_exclude(self):
        """.exclude() omits top-level fields and dotted embedded sub-fields
        while keeping everything else loaded."""
        class User(EmbeddedDocument):
            name = StringField()
            email = StringField()

        class Comment(EmbeddedDocument):
            title = StringField()
            text = StringField()

        class BlogPost(Document):
            content = StringField()
            author = EmbeddedDocumentField(User)
            comments = ListField(EmbeddedDocumentField(Comment))

        BlogPost.drop_collection()

        post = BlogPost(content='Had a good coffee today...')
        post.author = User(name='Test User')
        post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')]
        post.save()

        obj = BlogPost.objects.exclude('author', 'comments.text').get()
        self.assertEqual(obj.author, None)
        self.assertEqual(obj.content, 'Had a good coffee today...')
        self.assertEqual(obj.comments[0].title, 'I aggree')
        self.assertEqual(obj.comments[0].text, None)

        BlogPost.drop_collection()

    def test_exclude_only_combining(self):
        """Chained exclude/only calls combine: a field must survive every
        restriction in the chain to be loaded."""
        class Attachment(EmbeddedDocument):
            name = StringField()
            content = StringField()

        class Email(Document):
            sender = StringField()
            to = StringField()
            subject = StringField()
            body = StringField()
            content_type = StringField()
            attachments = ListField(EmbeddedDocumentField(Attachment))

        Email.drop_collection()
        email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain')
        email.attachments = [
            Attachment(name='file1.doc', content='ABC'),
            Attachment(name='file2.doc', content='XYZ'),
        ]
        email.save()

        # exclude + exclude: both fields omitted.
        obj = Email.objects.exclude('content_type').exclude('body').get()
        self.assertEqual(obj.sender, 'me')
        self.assertEqual(obj.to, 'you')
        self.assertEqual(obj.subject, 'From Russia with Love')
        self.assertEqual(obj.body, None)
        self.assertEqual(obj.content_type, None)

        # only + exclude: 'sender' is included then excluded, so omitted.
        obj = Email.objects.only('sender', 'to').exclude('body', 'sender').get()
        self.assertEqual(obj.sender, None)
        self.assertEqual(obj.to, 'you')
        self.assertEqual(obj.subject, None)
        self.assertEqual(obj.body, None)
        self.assertEqual(obj.content_type, None)

        # excludes on dotted sub-fields combine with a later only().
        obj = Email.objects.exclude('attachments.content').exclude('body').only('to', 'attachments.name').get()
        self.assertEqual(obj.attachments[0].name, 'file1.doc')
        self.assertEqual(obj.attachments[0].content, None)
        self.assertEqual(obj.sender, None)
        self.assertEqual(obj.to, 'you')
        self.assertEqual(obj.subject, None)
        self.assertEqual(obj.body, None)
        self.assertEqual(obj.content_type, None)

        Email.drop_collection()

    def test_all_fields(self):
        """.all_fields() discards any earlier only/exclude restrictions and
        reloads every field."""

        class Email(Document):
            sender = StringField()
            to = StringField()
            subject = StringField()
            body = StringField()
            content_type = StringField()

        Email.drop_collection()

        email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain')
        email.save()

        obj = Email.objects.exclude('content_type', 'body').only('to', 'body').all_fields().get()
        self.assertEqual(obj.sender, 'me')
        self.assertEqual(obj.to, 'you')
        self.assertEqual(obj.subject, 'From Russia with Love')
        self.assertEqual(obj.body, 'Hello!')
        self.assertEqual(obj.content_type, 'text/plain')

        Email.drop_collection()

    def test_slicing_fields(self):
        """Ensure that query slicing an array works.
        """
        class Numbers(Document):
            n = ListField(IntField())

        Numbers.drop_collection()

        numbers = Numbers(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
        numbers.save()

        # first three
        numbers = Numbers.objects.fields(slice__n=3).get()
        self.assertEqual(numbers.n, [0, 1, 2])

        # last three
        numbers = Numbers.objects.fields(slice__n=-3).get()
        self.assertEqual(numbers.n, [-3, -2, -1])

        # skip 2, limit 3
        numbers = Numbers.objects.fields(slice__n=[2, 3]).get()
        self.assertEqual(numbers.n, [2, 3, 4])

        # skip to fifth from last, limit 4
        numbers = Numbers.objects.fields(slice__n=[-5, 4]).get()
        self.assertEqual(numbers.n, [-5, -4, -3, -2])

        # skip to fifth from last, limit 10
        numbers = Numbers.objects.fields(slice__n=[-5, 10]).get()
        self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])

        # skip to fifth from last, limit 10 dict method
        numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get()
        self.assertEqual(numbers.n, [-5, -4, -3, -2, -1])

    def test_slicing_nested_fields(self):
        """Ensure that query slicing an embedded array works.
        """

        class EmbeddedNumber(EmbeddedDocument):
            n = ListField(IntField())

        class Numbers(Document):
            embedded = EmbeddedDocumentField(EmbeddedNumber)

        Numbers.drop_collection()

        numbers = Numbers()
        numbers.embedded = EmbeddedNumber(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1])
        numbers.save()

        # first three
        numbers = Numbers.objects.fields(slice__embedded__n=3).get()
        self.assertEqual(numbers.embedded.n, [0, 1, 2])

        # last three
        numbers = Numbers.objects.fields(slice__embedded__n=-3).get()
        self.assertEqual(numbers.embedded.n, [-3, -2, -1])

        # skip 2, limit 3
        numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get()
        self.assertEqual(numbers.embedded.n, [2, 3, 4])

        # skip to fifth from last, limit 4
        numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get()
        self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2])

        # skip to fifth from last, limit 10
        numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get()
        self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])

        # skip to fifth from last, limit 10 dict method
        numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get()
        self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1])

    def test_exclude_from_subclasses_docs(self):
        """Excluding a field declared on a subclass works when querying via
        the base class; unknown field names raise LookUpError."""

        class Base(Document):
            username = StringField()

            meta = {'allow_inheritance': True}

        class Anon(Base):
            anon = BooleanField()

        class User(Base):
            password = StringField()
            wibble = StringField()

        Base.drop_collection()
        User(username="mongodb", password="secret").save()

        user = Base.objects().exclude("password", "wibble").first()
        self.assertEqual(user.password, None)

        self.assertRaises(LookUpError, Base.objects.exclude, "made_up")
|  |  | ||||||
# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user