Compare commits
	
		
			360 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | e8dbd12f22 | ||
|  | ca230d28b4 | ||
|  | c96065b187 | ||
|  | 2abcf4764d | ||
|  | bb0b1e88ef | ||
|  | 63c9135184 | ||
|  | 7fac0ef961 | ||
|  | 5a2e268160 | ||
|  | a4e4e8f440 | ||
|  | b62ce947a6 | ||
|  | 9538662262 | ||
|  | 09d7ae4f80 | ||
|  | d7ded366c7 | ||
|  | 09c77973a0 | ||
|  | 22f3c70234 | ||
|  | 6527b1386f | ||
|  | baabf97acd | ||
|  | 97005aca66 | ||
|  | 6e8ea50c19 | ||
|  | 1fcd706e11 | ||
|  | 008bb19b0b | ||
|  | 023acab779 | ||
|  | 5d120ebca0 | ||
|  | f91b89f723 | ||
|  | 1181b75e16 | ||
|  | 5f00b4f923 | ||
|  | 4c31193b82 | ||
|  | 17fc9d1886 | ||
|  | d7285d43dd | ||
|  | aa8a991d20 | ||
|  | 40ba51ac43 | ||
|  | d20430a778 | ||
|  | f08f749cd9 | ||
|  | a6c04f4f9a | ||
|  | 15b6c1590f | ||
|  | 4a8985278d | ||
|  | 996618a495 | ||
|  | 1f02d5fbbd | ||
|  | c58b9f00f0 | ||
|  | f131b18cbe | ||
|  | 118a998138 | ||
|  | 7ad6f036e7 | ||
|  | 1d29b824a8 | ||
|  | 3caf2dce28 | ||
|  | 1fc5b954f2 | ||
|  | 31d99c0bd2 | ||
|  | 0ac59c67ea | ||
|  | 8e8c74c621 | ||
|  | f996f3df74 | ||
|  | 9499c97e18 | ||
|  | c1c81fc07b | ||
|  | 072e86a2f0 | ||
|  | 70d6e763b0 | ||
|  | 15f4d4fee6 | ||
|  | 82e28dec43 | ||
|  | b407c0e6c6 | ||
|  | 27ea01ee05 | ||
|  | 7ed5829b2c | ||
|  | 5bf1dd55b1 | ||
|  | 36aebffcc0 | ||
|  | 84c42ed58c | ||
|  | 9634e44343 | ||
|  | 048a045966 | ||
|  | a18c8c0eb4 | ||
|  | 5fb0f46e3f | ||
|  | 962997ed16 | ||
|  | daca0ebc14 | ||
|  | 9ae8fe7c2d | ||
|  | 1907133f99 | ||
|  | 4334955e39 | ||
|  | f00c9dc4d6 | ||
|  | 7d0687ec73 | ||
|  | da3773bfe8 | ||
|  | 6e1c132ee8 | ||
|  | 24ba35d76f | ||
|  | 64b63e9d52 | ||
|  | 7848a82a1c | ||
|  | 6a843cc8b2 | ||
|  | ecdb0785a4 | ||
|  | 9a55caed75 | ||
|  | 2e01eb87db | ||
|  | 597b962ad5 | ||
|  | 7531f533e0 | ||
|  | 6b9d71554e | ||
|  | bb1089e03d | ||
|  | c82f0c937d | ||
|  | 00d2fd685a | ||
|  | f28e1b8c90 | ||
|  | 2b17985a11 | ||
|  | b392e3102e | ||
|  | 58b0b18ddd | ||
|  | 6a9ef319d0 | ||
|  | cf38ef70cb | ||
|  | ac64ade10f | ||
|  | ee85af34d8 | ||
|  | 9d53ad53e5 | ||
|  | 9cdc3ebee6 | ||
|  | 14a5e05d64 | ||
|  | f7b7d0f79e | ||
|  | d98f36ceff | ||
|  | abfabc30c9 | ||
|  | c1aff7a248 | ||
|  | e44f71eeb1 | ||
|  | cb578c84e2 | ||
|  | 565e1dc0ed | ||
|  | b1e28d02f7 | ||
|  | d1467c2f73 | ||
|  | c439150431 | ||
|  | 9bb3dfd639 | ||
|  | 4caa58b9ec | ||
|  | b5213097e8 | ||
|  | 61081651e4 | ||
|  | 4ccfdf051d | ||
|  | 9f2a9d9cda | ||
|  | 827de76345 | ||
|  | fdcaca42ae | ||
|  | 0744892244 | ||
|  | b70ffc69df | ||
|  | 73b12cc32f | ||
|  | ba6a37f315 | ||
|  | 6f8be8c8ac | ||
|  | 68497542b3 | ||
|  | 3d762fed10 | ||
|  | 48b849c031 | ||
|  | 88c4aa2d87 | ||
|  | fb8c0d8fe3 | ||
|  | 1a863725d1 | ||
|  | 7b4245c91c | ||
|  | 9bd0d6b99d | ||
|  | b640c766db | ||
|  | 50ffa8014e | ||
|  | 7ef688b256 | ||
|  | b4fe0b35e4 | ||
|  | a2cbbdf819 | ||
|  | 35b7efe3f4 | ||
|  | 7cea2a768f | ||
|  | 7247b9b68e | ||
|  | dca837b843 | ||
|  | c60c2ee8d0 | ||
|  | 3cdb5b5db2 | ||
|  | b9cc8a4ca9 | ||
|  | 28606e9985 | ||
|  | 5bbe782812 | ||
|  | d65861cdf7 | ||
|  | c8df3fd2a7 | ||
|  | 6cfe6652a3 | ||
|  | 6b711da69d | ||
|  | 9b02867293 | ||
|  | 595cb99b2d | ||
|  | f0a3445250 | ||
|  | 6d353dae1e | ||
|  | 57a38282a9 | ||
|  | db47604865 | ||
|  | 2a121fe202 | ||
|  | 36baff0d7f | ||
|  | 201f3008b1 | ||
|  | f4873fee18 | ||
|  | e02261be6d | ||
|  | 2919e6765c | ||
|  | b8fc4d0079 | ||
|  | 4a46f5f095 | ||
|  | 3484ceabb8 | ||
|  | cab659dce6 | ||
|  | a657f29439 | ||
|  | 4c054bf316 | ||
|  | dc7922c38b | ||
|  | c6c68abfcc | ||
|  | 6aacb0c898 | ||
|  | e7000db491 | ||
|  | fce994ea7f | ||
|  | 6c6446765e | ||
|  | 69a99c70c6 | ||
|  | 56d9f7a8af | ||
|  | 363aefe399 | ||
|  | 7fd4f792ba | ||
|  | 6fbdde63d8 | ||
|  | b04dc90cdf | ||
|  | b525c91bd3 | ||
|  | a32c893078 | ||
|  | 2c6a744848 | ||
|  | 4492874d08 | ||
|  | d3a592e5bf | ||
|  | cab21b1b21 | ||
|  | 1319e422ea | ||
|  | c88ea40b57 | ||
|  | 3194a37fcb | ||
|  | 72ebaa52e9 | ||
|  | 0e00695fc7 | ||
|  | 48a691e722 | ||
|  | cf54d6d6f8 | ||
|  | a03fe234d0 | ||
|  | d88d40cc08 | ||
|  | d3b4af116e | ||
|  | 352b23331b | ||
|  | bdd6041a5c | ||
|  | 1894003f8a | ||
|  | 220513ae42 | ||
|  | fcbabbe357 | ||
|  | 3627969fce | ||
|  | 8807c0dbef | ||
|  | 23cc9f6ff8 | ||
|  | e50799e9c4 | ||
|  | b92c4844eb | ||
|  | c306d42d08 | ||
|  | e31558318e | ||
|  | 78a9420f26 | ||
|  | b47c5b5bfc | ||
|  | 28a312accf | ||
|  | 611094e92e | ||
|  | 2a8579a6a5 | ||
|  | 47577f2f47 | ||
|  | 34e3e45843 | ||
|  | 364dc9ddfb | ||
|  | 23324f0f87 | ||
|  | 17fa9a3b77 | ||
|  | 424b3ca308 | ||
|  | 26e2fc8fd4 | ||
|  | 8e18484898 | ||
|  | 354cfe0f9c | ||
|  | 983474b2bd | ||
|  | 14d861bcbb | ||
|  | f6cd349a16 | ||
|  | 8e1c4dec87 | ||
|  | 18b47e4a73 | ||
|  | 4f157f50ed | ||
|  | f44a2f4857 | ||
|  | c685ace327 | ||
|  | f23b0faf41 | ||
|  | e0e2ca7ccd | ||
|  | 83fe7f7eef | ||
|  | 1feaa8f2e9 | ||
|  | 598d6bf4c5 | ||
|  | 0afd5a40d6 | ||
|  | 26b70e9ed3 | ||
|  | a1a93a4bdd | ||
|  | 4939a7dd7c | ||
|  | 0fa6610fdb | ||
|  | b0148e7860 | ||
|  | 59a06a242d | ||
|  | ffe902605d | ||
|  | 556f7e85fc | ||
|  | 45c86be402 | ||
|  | bf34f413de | ||
|  | 9b022b187f | ||
|  | c3409d64dc | ||
|  | 3c5c3b5026 | ||
|  | f240f00d84 | ||
|  | 68c7764c63 | ||
|  | adfb039ba6 | ||
|  | 89416d9856 | ||
|  | 9b6c972e0f | ||
|  | 55fc04752a | ||
|  | 96f0919633 | ||
|  | 17b140baf4 | ||
|  | 45c2151d0f | ||
|  | 1887f5b7e7 | ||
|  | 708d1c7a32 | ||
|  | acf8c3015a | ||
|  | f83ae5789b | ||
|  | 57ccfcfc1b | ||
|  | dd0fdcfdd4 | ||
|  | 5c805be067 | ||
|  | e423380d7f | ||
|  | 4d8bebc917 | ||
|  | 4314fa883f | ||
|  | d6e39b362b | ||
|  | f89214f9cf | ||
|  | d17cac8210 | ||
|  | aa49283fa9 | ||
|  | e79ea7a2cf | ||
|  | 8a1d280f19 | ||
|  | 6a8eb9562f | ||
|  | 8f76e1e344 | ||
|  | 7b9f084e6b | ||
|  | 5b1693a908 | ||
|  | fd7c00da49 | ||
|  | 7fc5ced3af | ||
|  | a86092fb64 | ||
|  | 003827e916 | ||
|  | b15673c525 | ||
|  | 00363303b1 | ||
|  | 48fbe890f8 | ||
|  | 4179877cc7 | ||
|  | 282b83ac08 | ||
|  | 193656e71b | ||
|  | a25d127f36 | ||
|  | cf9df548ca | ||
|  | f29b93c762 | ||
|  | 032ace40d1 | ||
|  | f74dd1cb3c | ||
|  | 29889d1e35 | ||
|  | d6d19c4229 | ||
|  | ab08e67eaf | ||
|  | 00bf6ac258 | ||
|  | b65478e7d9 | ||
|  | e83b529f1c | ||
|  | 408274152b | ||
|  | 8ff82996fb | ||
|  | d59c4044b7 | ||
|  | 3574e21e4f | ||
|  | 5a091956ef | ||
|  | 14e9c58444 | ||
|  | bfe5b03c69 | ||
|  | f96f7f840e | ||
|  | a3bcf26dce | ||
|  | a7852a89cc | ||
|  | 1b0c761fc0 | ||
|  | 5e4e8d4eda | ||
|  | bd524d2e1e | ||
|  | 60fe919992 | ||
|  | b90063b170 | ||
|  | d9fce49b08 | ||
|  | 5dbee2a270 | ||
|  | 4779106139 | ||
|  | bf2de81873 | ||
|  | 28cdedc9aa | ||
|  | 7e90571404 | ||
|  | 42bbe63927 | ||
|  | 7ddbea697e | ||
|  | b4860de34d | ||
|  | 576f23d5fb | ||
|  | 86548fc7bf | ||
|  | b3b4d992fe | ||
|  | d72daf5f39 | ||
|  | 9ad959a478 | ||
|  | cc00a321da | ||
|  | de74273108 | ||
|  | a7658c7573 | ||
|  | 48a85ee6e0 | ||
|  | 461b789515 | ||
|  | b71ff6fbb8 | ||
|  | 1bcdcce93a | ||
|  | c09bfca634 | ||
|  | 36c5f02bfb | ||
|  | eae6e5d9a1 | ||
|  | 364813dd73 | ||
|  | 1a2b1f283b | ||
|  | a0e5cf4ecc | ||
|  | 820f7b4d93 | ||
|  | 727866f090 | ||
|  | 3d45cdc339 | ||
|  | 02a557aa67 | ||
|  | 6da27e5976 | ||
|  | 19a6e324c4 | ||
|  | 62eadbc174 | ||
|  | 080226dd72 | ||
|  | 2d76aebb8e | ||
|  | 8b5df3ca17 | ||
|  | 6b38ef3c9f | ||
|  | 15451ff42b | ||
|  | 6e2db1ced6 | ||
|  | 5c4ce8754e | ||
|  | 416486c370 | ||
|  | 2f075be6f8 | ||
|  | c4de879b20 | ||
|  | ee5686e91a | ||
|  | 2a795e9138 | ||
|  | 437b11af9a | ||
|  | 99a5f2cd9d | ||
|  | 54d8c64ad5 | 
| @@ -1,27 +0,0 @@ | |||||||
| #!/bin/bash |  | ||||||
|  |  | ||||||
| sudo apt-get remove mongodb-org-server |  | ||||||
| sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10 |  | ||||||
|  |  | ||||||
| if [ "$MONGODB" = "2.4" ]; then |  | ||||||
|     echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list |  | ||||||
|     sudo apt-get update |  | ||||||
|     sudo apt-get install mongodb-10gen=2.4.14 |  | ||||||
|     sudo service mongodb start |  | ||||||
| elif [ "$MONGODB" = "2.6" ]; then |  | ||||||
|     echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list |  | ||||||
|     sudo apt-get update |  | ||||||
|     sudo apt-get install mongodb-org-server=2.6.12 |  | ||||||
|     # service should be started automatically |  | ||||||
| elif [ "$MONGODB" = "3.0" ]; then |  | ||||||
|     echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list |  | ||||||
|     sudo apt-get update |  | ||||||
|     sudo apt-get install mongodb-org-server=3.0.14 |  | ||||||
|     # service should be started automatically |  | ||||||
| else |  | ||||||
|     echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0." |  | ||||||
|     exit 1 |  | ||||||
| fi; |  | ||||||
|  |  | ||||||
| mkdir db |  | ||||||
| 1>db/logs mongod --dbpath=db & |  | ||||||
							
								
								
									
										89
									
								
								.travis.yml
									
									
									
									
									
								
							
							
						
						
									
										89
									
								
								.travis.yml
									
									
									
									
									
								
							| @@ -2,68 +2,73 @@ | |||||||
| # PyMongo combinations. However, that would result in an overly long build | # PyMongo combinations. However, that would result in an overly long build | ||||||
| # with a very large number of jobs, hence we only test a subset of all the | # with a very large number of jobs, hence we only test a subset of all the | ||||||
| # combinations: | # combinations: | ||||||
| # * MongoDB v2.4 & v3.0 are only tested against Python v2.7 & v3.5. | # * MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, | ||||||
| # * MongoDB v2.4 is tested against PyMongo v2.7 & v3.x. | #   tested against Python v2.7, v3.5, v3.6, and PyPy. | ||||||
| # * MongoDB v3.0 is tested against PyMongo v3.x. | # * Besides that, we test the lowest actively supported Python/MongoDB/PyMongo | ||||||
| # * MongoDB v2.6 is currently the "main" version tested against Python v2.7, | #   combination: MongoDB v3.4, PyMongo v3.4, Python v2.7. | ||||||
| #   v3.5, PyPy & PyPy3, and PyMongo v2.7, v2.8 & v3.x. | # * MongoDB v3.6 is tested against Python v3.6, and PyMongo v3.6, v3.7, v3.8. | ||||||
|  | # | ||||||
|  | # We should periodically check MongoDB Server versions supported by MongoDB | ||||||
|  | # Inc., add newly released versions to the test matrix, and remove versions | ||||||
|  | # which have reached their End of Life. See: | ||||||
|  | # 1. https://www.mongodb.com/support-policy. | ||||||
|  | # 2. https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#python-driver-compatibility | ||||||
| # | # | ||||||
| # Reminder: Update README.rst if you change MongoDB versions we test. | # Reminder: Update README.rst if you change MongoDB versions we test. | ||||||
|  |  | ||||||
| language: python |  | ||||||
|  |  | ||||||
|  | language: python | ||||||
| python: | python: | ||||||
| - 2.7 | - 2.7 | ||||||
| - 3.5 | - 3.5 | ||||||
| - 3.6 | - 3.6 | ||||||
| - pypy | - pypy | ||||||
|  |  | ||||||
|  | dist: xenial | ||||||
|  |  | ||||||
| env: | env: | ||||||
| - MONGODB=2.6 PYMONGO=3.x |   global: | ||||||
|  |     - MONGODB_3_4=3.4.17 | ||||||
|  |     - MONGODB_3_6=3.6.12 | ||||||
|  |   matrix: | ||||||
|  |     - MONGODB=${MONGODB_3_4} PYMONGO=3.x | ||||||
|  |  | ||||||
| matrix: | matrix: | ||||||
|  |  | ||||||
|   # Finish the build as soon as one job fails |   # Finish the build as soon as one job fails | ||||||
|   fast_finish: true |   fast_finish: true | ||||||
|  |  | ||||||
|   include: |   include: | ||||||
|   - python: 2.7 |   - python: 2.7 | ||||||
|     env: MONGODB=2.4 PYMONGO=3.5 |     env: MONGODB=${MONGODB_3_4} PYMONGO=3.4.x | ||||||
|   - python: 2.7 |  | ||||||
|     env: MONGODB=3.0 PYMONGO=3.x |  | ||||||
|   - python: 3.5 |  | ||||||
|     env: MONGODB=2.4 PYMONGO=3.5 |  | ||||||
|   - python: 3.5 |  | ||||||
|     env: MONGODB=3.0 PYMONGO=3.x |  | ||||||
|   - python: 3.6 |   - python: 3.6 | ||||||
|     env: MONGODB=2.4 PYMONGO=3.5 |     env: MONGODB=${MONGODB_3_6} PYMONGO=3.x | ||||||
|   - python: 3.6 |   - python: 3.7 | ||||||
|     env: MONGODB=3.0 PYMONGO=3.x |     env: MONGODB=${MONGODB_3_6} PYMONGO=3.x | ||||||
|  |  | ||||||
| before_install: |  | ||||||
| - bash .install_mongodb_on_travis.sh |  | ||||||
| - sleep 15  # https://docs.travis-ci.com/user/database-setup/#MongoDB-does-not-immediately-accept-connections |  | ||||||
| - mongo --eval 'db.version();' |  | ||||||
|  |  | ||||||
| install: | install: | ||||||
| - sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev |   # Install Mongo | ||||||
|   libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev |   - wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz | ||||||
|   python-tk |   - tar xzf mongodb-linux-x86_64-${MONGODB}.tgz | ||||||
| - travis_retry pip install --upgrade pip |   - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version | ||||||
| - travis_retry pip install coveralls |   # Install python dependencies | ||||||
| - travis_retry pip install flake8 flake8-import-order |   - pip install --upgrade pip | ||||||
| - travis_retry pip install tox>=1.9 |   - pip install coveralls | ||||||
| - travis_retry pip install "virtualenv<14.0.0"  # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) |   - pip install flake8 flake8-import-order | ||||||
| - travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test |   - pip install tox         # tox 3.11.0 has requirement virtualenv>=14.0.0 | ||||||
|  |   - pip install virtualenv  # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) | ||||||
|  |   # Install the tox venv | ||||||
|  |   - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test | ||||||
|  |  | ||||||
| # Cache dependencies installed via pip |  | ||||||
| cache: pip |  | ||||||
|  |  | ||||||
| # Run flake8 for py27 |  | ||||||
| before_script: | before_script: | ||||||
| - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi |   - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data | ||||||
|  |   - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork | ||||||
|  |   - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi   # Run flake8 for py27 | ||||||
|  |   - mongo --eval 'db.version();'    # Make sure mongo is awake | ||||||
|  |  | ||||||
| script: | script: | ||||||
| - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage |   - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage | ||||||
|  |  | ||||||
| # For now only submit coveralls for Python v2.7. Python v3.x currently shows | # For now only submit coveralls for Python v2.7. Python v3.x currently shows | ||||||
| # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible | # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible | ||||||
| @@ -87,15 +92,15 @@ deploy: | |||||||
|   password: |   password: | ||||||
|     secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek= |     secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek= | ||||||
|  |  | ||||||
|   # create a source distribution and a pure python wheel for faster installs |   # Create a source distribution and a pure python wheel for faster installs. | ||||||
|   distributions: "sdist bdist_wheel" |   distributions: "sdist bdist_wheel" | ||||||
|  |  | ||||||
|   # only deploy on tagged commits (aka GitHub releases) and only for the |   # Only deploy on tagged commits (aka GitHub releases) and only for the parent | ||||||
|   # parent repo's builds running Python 2.7 along with PyMongo v3.x (we run |   # repo's builds running Python v2.7 along with PyMongo v3.x and MongoDB v3.4. | ||||||
|   # Travis against many different Python and PyMongo versions and we don't |   # We run Travis against many different Python, PyMongo, and MongoDB versions | ||||||
|   # want the deploy to occur multiple times). |   # and we don't want the deploy to occur multiple times). | ||||||
|   on: |   on: | ||||||
|     tags: true |     tags: true | ||||||
|     repo: MongoEngine/mongoengine |     repo: MongoEngine/mongoengine | ||||||
|     condition: "$PYMONGO = 3.x" |     condition: ($PYMONGO = 3.x) && ($MONGODB = 3.4) | ||||||
|     python: 2.7 |     python: 2.7 | ||||||
|   | |||||||
							
								
								
									
										6
									
								
								AUTHORS
									
									
									
									
									
								
							
							
						
						
									
										6
									
								
								AUTHORS
									
									
									
									
									
								
							| @@ -246,3 +246,9 @@ that much better: | |||||||
|  * Renjianxin (https://github.com/Davidrjx) |  * Renjianxin (https://github.com/Davidrjx) | ||||||
|  * Erdenezul Batmunkh (https://github.com/erdenezul) |  * Erdenezul Batmunkh (https://github.com/erdenezul) | ||||||
|  * Andy Yankovsky (https://github.com/werat) |  * Andy Yankovsky (https://github.com/werat) | ||||||
|  |  * Bastien Gérard (https://github.com/bagerard) | ||||||
|  |  * Trevor Hall (https://github.com/tjhall13) | ||||||
|  |  * Gleb Voropaev (https://github.com/buggyspace) | ||||||
|  |  * Paulo Amaral (https://github.com/pauloAmaral) | ||||||
|  |  * Gaurav Dadhania (https://github.com/GVRV) | ||||||
|  |  * Yurii Andrieiev (https://github.com/yandrieiev) | ||||||
|   | |||||||
| @@ -22,8 +22,11 @@ Supported Interpreters | |||||||
|  |  | ||||||
| MongoEngine supports CPython 2.7 and newer. Language | MongoEngine supports CPython 2.7 and newer. Language | ||||||
| features not supported by all interpreters can not be used. | features not supported by all interpreters can not be used. | ||||||
| Please also ensure that your code is properly converted by | The codebase is written in python 2 so you must be using python 2 | ||||||
| `2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support. | when developing new features. Compatibility of the library with Python 3 | ||||||
|  | relies on the 2to3 package that gets executed as part of the installation | ||||||
|  | build. You should ensure that your code is properly converted by | ||||||
|  | `2to3 <http://docs.python.org/library/2to3.html>`_. | ||||||
|  |  | ||||||
| Style Guide | Style Guide | ||||||
| ----------- | ----------- | ||||||
|   | |||||||
							
								
								
									
										18
									
								
								README.rst
									
									
									
									
									
								
							
							
						
						
									
										18
									
								
								README.rst
									
									
									
									
									
								
							| @@ -26,26 +26,28 @@ an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_. | |||||||
|  |  | ||||||
| Supported MongoDB Versions | Supported MongoDB Versions | ||||||
| ========================== | ========================== | ||||||
| MongoEngine is currently tested against MongoDB v2.4, v2.6, and v3.0. Future | MongoEngine is currently tested against MongoDB v3.4 and v3.6. Future versions | ||||||
| versions should be supported as well, but aren't actively tested at the moment. | should be supported as well, but aren't actively tested at the moment. Make | ||||||
| Make sure to open an issue or submit a pull request if you experience any | sure to open an issue or submit a pull request if you experience any problems | ||||||
| problems with MongoDB v3.2+. | with MongoDB version > 3.6. | ||||||
|  |  | ||||||
| Installation | Installation | ||||||
| ============ | ============ | ||||||
| We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of | We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of | ||||||
| `pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``. | `pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``. | ||||||
| You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | ||||||
| and thus you can use ``easy_install -U mongoengine``. Otherwise, you can download the | and thus you can use ``easy_install -U mongoengine``. Another option is | ||||||
| source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python | `pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine`` | ||||||
| setup.py install``. | to both create the virtual environment and install the package. Otherwise, you can | ||||||
|  | download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and | ||||||
|  | run ``python setup.py install``. | ||||||
|  |  | ||||||
| Dependencies | Dependencies | ||||||
| ============ | ============ | ||||||
| All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_. | All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_. | ||||||
| At the very least, you'll need these two packages to use MongoEngine: | At the very least, you'll need these two packages to use MongoEngine: | ||||||
|  |  | ||||||
| - pymongo>=2.7.1 | - pymongo>=3.4 | ||||||
| - six>=1.10.0 | - six>=1.10.0 | ||||||
|  |  | ||||||
| If you utilize a ``DateTimeField``, you might also use a more flexible date parser: | If you utilize a ``DateTimeField``, you might also use a more flexible date parser: | ||||||
|   | |||||||
							
								
								
									
										207
									
								
								benchmark.py
									
									
									
									
									
								
							
							
						
						
									
										207
									
								
								benchmark.py
									
									
									
									
									
								
							| @@ -1,207 +0,0 @@ | |||||||
| #!/usr/bin/env python |  | ||||||
|  |  | ||||||
| """ |  | ||||||
| Simple benchmark comparing PyMongo and MongoEngine. |  | ||||||
|  |  | ||||||
| Sample run on a mid 2015 MacBook Pro (commit b282511): |  | ||||||
|  |  | ||||||
| Benchmarking... |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries - Pymongo |  | ||||||
| 2.58979988098 |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries - Pymongo write_concern={"w": 0} |  | ||||||
| 1.26657605171 |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries - MongoEngine |  | ||||||
| 8.4351580143 |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries without continual assign - MongoEngine |  | ||||||
| 7.20191693306 |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True |  | ||||||
| 6.31104588509 |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True |  | ||||||
| 6.07083487511 |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False |  | ||||||
| 5.97704291344 |  | ||||||
| ---------------------------------------------------------------------------------------------------- |  | ||||||
| Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False |  | ||||||
| 5.9111430645 |  | ||||||
| """ |  | ||||||
|  |  | ||||||
| import timeit |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def main(): |  | ||||||
|     print("Benchmarking...") |  | ||||||
|  |  | ||||||
|     setup = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| connection = MongoClient() |  | ||||||
| connection.drop_database('timeit_test') |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| connection = MongoClient() |  | ||||||
|  |  | ||||||
| db = connection.timeit_test |  | ||||||
| noddy = db.noddy |  | ||||||
|  |  | ||||||
| for i in range(10000): |  | ||||||
|     example = {'fields': {}} |  | ||||||
|     for j in range(20): |  | ||||||
|         example['fields']['key' + str(j)] = 'value ' + str(j) |  | ||||||
|  |  | ||||||
|     noddy.save(example) |  | ||||||
|  |  | ||||||
| myNoddys = noddy.find() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - Pymongo""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| from pymongo.write_concern import WriteConcern |  | ||||||
| connection = MongoClient() |  | ||||||
|  |  | ||||||
| db = connection.get_database('timeit_test', write_concern=WriteConcern(w=0)) |  | ||||||
| noddy = db.noddy |  | ||||||
|  |  | ||||||
| for i in range(10000): |  | ||||||
|     example = {'fields': {}} |  | ||||||
|     for j in range(20): |  | ||||||
|         example['fields']["key"+str(j)] = "value "+str(j) |  | ||||||
|  |  | ||||||
|     noddy.save(example) |  | ||||||
|  |  | ||||||
| myNoddys = noddy.find() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     setup = """ |  | ||||||
| from pymongo import MongoClient |  | ||||||
| connection = MongoClient() |  | ||||||
| connection.drop_database('timeit_test') |  | ||||||
| connection.close() |  | ||||||
|  |  | ||||||
| from mongoengine import Document, DictField, connect |  | ||||||
| connect('timeit_test') |  | ||||||
|  |  | ||||||
| class Noddy(Document): |  | ||||||
|     fields = DictField() |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save() |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     fields = {} |  | ||||||
|     for j in range(20): |  | ||||||
|         fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.fields = fields |  | ||||||
|     noddy.save() |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries without continual assign - MongoEngine""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(write_concern={"w": 0}, cascade=True) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(write_concern={"w": 0}, validate=False, cascade=True) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(validate=False, write_concern={"w": 0}) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in range(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(force_insert=True, write_concern={"w": 0}, validate=False) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print("-" * 100) |  | ||||||
|     print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""") |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print(t.timeit(1)) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == "__main__": |  | ||||||
|     main() |  | ||||||
							
								
								
									
										148
									
								
								benchmarks/test_basic_doc_ops.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										148
									
								
								benchmarks/test_basic_doc_ops.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,148 @@ | |||||||
|  | from timeit import repeat | ||||||
|  |  | ||||||
|  | import mongoengine | ||||||
|  | from mongoengine import (BooleanField, Document, EmailField, EmbeddedDocument, | ||||||
|  |                          EmbeddedDocumentField, IntField, ListField, | ||||||
|  |                          StringField) | ||||||
|  |  | ||||||
|  | mongoengine.connect(db='mongoengine_benchmark_test') | ||||||
|  |  | ||||||
|  |  | ||||||
def timeit(f, n=10000):
    """Return the best per-call time, in seconds, of calling *f*.

    Each of 3 trials calls ``f`` ``n`` times; the fastest trial is kept
    (minimizes scheduler/GC noise) and divided by ``n`` to yield the
    average cost of one call.
    """
    # `repeat` returns floats, so true division needs no float() cast.
    return min(repeat(f, repeat=3, number=n)) / n
|  |  | ||||||
|  |  | ||||||
def test_basic():
    """Benchmark core operations (init, attr access, serialization,
    validation, save/load/delete) on a small flat document."""
    class Book(Document):
        name = StringField()
        pages = IntField()
        tags = ListField(StringField())
        is_published = BooleanField()
        author_email = EmailField()

    Book.drop_collection()

    def init_book():
        return Book(
            name='Always be closing',
            pages=100,
            tags=['self-help', 'sales'],
            is_published=True,
            author_email='alec@example.com',
        )

    us = 10**6  # seconds -> microseconds
    ms = 10**3  # seconds -> milliseconds

    print('Doc initialization: %.3fus' % (timeit(init_book, 1000) * us))

    b = init_book()
    print('Doc getattr: %.3fus' % (timeit(lambda: b.name, 10000) * us))
    print(
        'Doc setattr: %.3fus' % (
            timeit(lambda: setattr(b, 'name', 'New name'), 10000) * us
        )
    )
    print('Doc to mongo: %.3fus' % (timeit(b.to_mongo, 1000) * us))
    print('Doc validation: %.3fus' % (timeit(b.validate, 1000) * us))

    def save_book():
        # Force a dirty state so save() actually issues an update.
        b._mark_as_changed('name')
        b._mark_as_changed('tags')
        b.save()

    print('Save to database: %.3fus' % (timeit(save_book, 100) * us))

    son = b.to_mongo()
    print(
        'Load from SON: %.3fus' % (
            timeit(lambda: Book._from_son(son), 1000) * us
        )
    )
    print(
        'Load from database: %.3fus' % (
            timeit(lambda: Book.objects[0], 100) * us
        )
    )

    def create_and_delete_book():
        book = init_book()
        book.save()
        book.delete()

    print(
        'Init + save to database + delete: %.3fms' % (
            timeit(create_and_delete_book, 10) * ms
        )
    )
|  |  | ||||||
|  |  | ||||||
def test_big_doc():
    """Benchmark serialization, validation and persistence of a large
    document holding 1000 embedded documents."""
    class Contact(EmbeddedDocument):
        name = StringField()
        title = StringField()
        address = StringField()

    class Company(Document):
        name = StringField()
        contacts = ListField(EmbeddedDocumentField(Contact))

    Company.drop_collection()

    def init_company():
        contacts = [
            Contact(
                name='Contact %d' % i,
                title='CEO',
                address='Address %d' % i,
            )
            for i in range(1000)
        ]
        return Company(name='MongoDB, Inc.', contacts=contacts)

    ms = 10**3  # seconds -> milliseconds

    company = init_company()
    print('Big doc to mongo: %.3fms' % (timeit(company.to_mongo, 100) * ms))
    print('Big doc validation: %.3fms' % (timeit(company.validate, 1000) * ms))

    company.save()

    def save_company():
        # Force a dirty state so save() actually issues an update.
        company._mark_as_changed('name')
        company._mark_as_changed('contacts')
        company.save()

    print('Save to database: %.3fms' % (timeit(save_company, 100) * ms))

    son = company.to_mongo()
    print(
        'Load from SON: %.3fms' % (
            timeit(lambda: Company._from_son(son), 100) * ms
        )
    )
    print(
        'Load from database: %.3fms' % (
            timeit(lambda: Company.objects[0], 100) * ms
        )
    )

    def create_and_delete_company():
        c = init_company()
        c.save()
        c.delete()

    print(
        'Init + save to database + delete: %.3fms' % (
            timeit(create_and_delete_company, 10) * ms
        )
    )
|  |  | ||||||
|  |  | ||||||
if __name__ == '__main__':
    # Run both benchmark suites, separated by a visual divider.
    for index, suite in enumerate((test_basic, test_big_doc)):
        if index:
            print('-' * 100)
        suite()
							
								
								
									
										154
									
								
								benchmarks/test_inserts.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										154
									
								
								benchmarks/test_inserts.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,154 @@ | |||||||
|  | import timeit | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def main(): | ||||||
|  |     setup = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  | connection = MongoClient() | ||||||
|  | connection.drop_database('mongoengine_benchmark_test') | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  | connection = MongoClient() | ||||||
|  |  | ||||||
|  | db = connection.mongoengine_benchmark_test | ||||||
|  | noddy = db.noddy | ||||||
|  |  | ||||||
|  | for i in range(10000): | ||||||
|  |     example = {'fields': {}} | ||||||
|  |     for j in range(20): | ||||||
|  |         example['fields']["key"+str(j)] = "value "+str(j) | ||||||
|  |  | ||||||
|  |     noddy.insert_one(example) | ||||||
|  |  | ||||||
|  | myNoddys = noddy.find() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print('-' * 100) | ||||||
|  |     print('PyMongo: Creating 10000 dictionaries.') | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print('{}s'.format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | from pymongo import MongoClient, WriteConcern | ||||||
|  | connection = MongoClient() | ||||||
|  |  | ||||||
|  | db = connection.mongoengine_benchmark_test | ||||||
|  | noddy = db.noddy.with_options(write_concern=WriteConcern(w=0)) | ||||||
|  |  | ||||||
|  | for i in range(10000): | ||||||
|  |     example = {'fields': {}} | ||||||
|  |     for j in range(20): | ||||||
|  |         example['fields']["key"+str(j)] = "value "+str(j) | ||||||
|  |  | ||||||
|  |     noddy.insert_one(example) | ||||||
|  |  | ||||||
|  | myNoddys = noddy.find() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print('-' * 100) | ||||||
|  |     print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).') | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print('{}s'.format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     setup = """ | ||||||
|  | from pymongo import MongoClient | ||||||
|  | connection = MongoClient() | ||||||
|  | connection.drop_database('mongoengine_benchmark_test') | ||||||
|  | connection.close() | ||||||
|  |  | ||||||
|  | from mongoengine import Document, DictField, connect | ||||||
|  | connect("mongoengine_benchmark_test") | ||||||
|  |  | ||||||
|  | class Noddy(Document): | ||||||
|  |     fields = DictField() | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save() | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print('-' * 100) | ||||||
|  |     print('MongoEngine: Creating 10000 dictionaries.') | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print('{}s'.format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     fields = {} | ||||||
|  |     for j in range(20): | ||||||
|  |         fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.fields = fields | ||||||
|  |     noddy.save() | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys]  # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print('-' * 100) | ||||||
|  |     print('MongoEngine: Creating 10000 dictionaries (using a single field assignment).') | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print('{}s'.format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(write_concern={"w": 0}) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print('-' * 100) | ||||||
|  |     print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).') | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print('{}s'.format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(write_concern={"w": 0}, validate=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print('-' * 100) | ||||||
|  |     print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).') | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print('{}s'.format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in range(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(force_insert=True, write_concern={"w": 0}, validate=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print('-' * 100) | ||||||
|  |     print('MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).') | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print('{}s'.format(t.timeit(1))) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | if __name__ == "__main__": | ||||||
|  |     main() | ||||||
| @@ -13,6 +13,7 @@ Documents | |||||||
|  |  | ||||||
| .. autoclass:: mongoengine.Document | .. autoclass:: mongoengine.Document | ||||||
|    :members: |    :members: | ||||||
|  |    :inherited-members: | ||||||
|  |  | ||||||
|    .. attribute:: objects |    .. attribute:: objects | ||||||
|  |  | ||||||
| @@ -21,15 +22,18 @@ Documents | |||||||
|  |  | ||||||
| .. autoclass:: mongoengine.EmbeddedDocument | .. autoclass:: mongoengine.EmbeddedDocument | ||||||
|    :members: |    :members: | ||||||
|  |    :inherited-members: | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.DynamicDocument | .. autoclass:: mongoengine.DynamicDocument | ||||||
|    :members: |    :members: | ||||||
|  |    :inherited-members: | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.DynamicEmbeddedDocument | .. autoclass:: mongoengine.DynamicEmbeddedDocument | ||||||
|    :members: |    :members: | ||||||
|  |    :inherited-members: | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.document.MapReduceDocument | .. autoclass:: mongoengine.document.MapReduceDocument | ||||||
|   :members: |    :members: | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.ValidationError | .. autoclass:: mongoengine.ValidationError | ||||||
|   :members: |   :members: | ||||||
|   | |||||||
| @@ -1,9 +1,109 @@ | |||||||
|  |  | ||||||
| ========= | ========= | ||||||
| Changelog | Changelog | ||||||
| ========= | ========= | ||||||
|  |  | ||||||
|  | Development | ||||||
|  | =========== | ||||||
|  | - (Fill this out as you fix issues and develop your features). | ||||||
|  |  | ||||||
|  | Changes in 0.18.1 | ||||||
|  | ================= | ||||||
|  | - Fix a bug introduced in 0.18.0 which was causing `.save()` to update all the fields | ||||||
|  |     instead of updating only the modified fields. This bug only occurs when using custom pk #2082 | ||||||
|  | - Add Python 3.7 in travis #2058 | ||||||
|  |  | ||||||
|  | Changes in 0.18.0 | ||||||
|  | ================= | ||||||
|  | - Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2. | ||||||
|  | - MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6 (#2017 #2066). | ||||||
|  | - Improve performance by avoiding a call to `to_mongo` in `Document.save()` #2049 | ||||||
|  | - Connection/disconnection improvements: | ||||||
|  |     - Expose `mongoengine.connection.disconnect` and `mongoengine.connection.disconnect_all` | ||||||
|  |     - Fix disconnecting #566 #1599 #605 #607 #1213 #565 | ||||||
|  |     - Improve documentation of `connect`/`disconnect` | ||||||
|  |     - Fix issue when using multiple connections to the same mongo with different credentials #2047 | ||||||
|  |     - `connect` fails immediately when db name contains invalid characters #2031 #1718 | ||||||
|  | - Fix the default write concern of `Document.save` that was overwriting the connection write concern #568 | ||||||
|  | - Fix querying on `List(EmbeddedDocument)` subclasses fields #1961 #1492 | ||||||
|  | - Fix querying on `(Generic)EmbeddedDocument` subclasses fields #475 | ||||||
|  | - Fix `QuerySet.aggregate` so that it takes limit and skip value into account #2029 | ||||||
|  | - Generate unique indices for `SortedListField` and `EmbeddedDocumentListFields` #2020 | ||||||
|  | - BREAKING CHANGE: Changed the behavior of a custom field validator (i.e `validation` parameter of a `Field`). It is now expected to raise a `ValidationError` instead of returning True/False #2050 | ||||||
|  | - BREAKING CHANGES (associated with connect/disconnect fixes): | ||||||
|  |     - Calling `connect` 2 times with the same alias and different parameter will raise an error (should call `disconnect` first). | ||||||
|  |     - `disconnect` now clears `mongoengine.connection._connection_settings`. | ||||||
|  |     - `disconnect` now clears the cached attribute `Document._collection`. | ||||||
|  | - BREAKING CHANGE: `EmbeddedDocument.save` & `.reload` no longer exist #1552 | ||||||
|  |  | ||||||
|  | Changes in 0.17.0 | ||||||
|  | ================= | ||||||
|  | - Fix .only() working improperly after using .count() of the same instance of QuerySet | ||||||
|  | - Fix batch_size that was not copied when cloning a queryset object #2011 | ||||||
|  | - POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (_cls, _id) when using `QuerySet.as_pymongo` #1976 | ||||||
|  | - Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time (#1995) | ||||||
|  | - Fix InvalidStringData error when using modify on a BinaryField #1127 | ||||||
|  | - DEPRECATION: `EmbeddedDocument.save` & `.reload` are marked as deprecated and will be removed in a next version of mongoengine #1552 | ||||||
|  | - Fix test suite and CI to support MongoDB 3.4 #1445 | ||||||
|  | - Fix reference fields querying the database on each access if value contains orphan DBRefs | ||||||
|  |  | ||||||
|  | ================= | ||||||
|  | Changes in 0.16.3 | ||||||
|  | ================= | ||||||
|  | - Fix $push with $position operator not working with lists in embedded document #1965 | ||||||
|  |  | ||||||
|  | ================= | ||||||
|  | Changes in 0.16.2 | ||||||
|  | ================= | ||||||
|  | - Fix .save() that fails when called with write_concern=None (regression of 0.16.1) #1958 | ||||||
|  |  | ||||||
|  | ================= | ||||||
|  | Changes in 0.16.1 | ||||||
|  | ================= | ||||||
|  | - Fix `_cls` that is not set properly in Document constructor (regression) #1950 | ||||||
|  | - Fix bug in _delta method - Update of a ListField depends on an unrelated dynamic field update #1733 | ||||||
|  | - Remove deprecated `save()` method and used `insert_one()` #1899 | ||||||
|  |  | ||||||
|  | ================= | ||||||
|  | Changes in 0.16.0 | ||||||
|  | ================= | ||||||
|  | - Various improvements to the doc | ||||||
|  | - Improvement to code quality | ||||||
|  | - POTENTIAL BREAKING CHANGES: | ||||||
|  |     - EmbeddedDocumentField will no longer accept references to Document classes in its constructor #1661 | ||||||
|  |     - Get rid of the `basecls` parameter from the DictField constructor (dead code) #1876 | ||||||
|  |     - default value of ComplexDateTime is now None (and no longer the current datetime) #1368 | ||||||
|  | - Fix unhashable TypeError when referencing a Document with a compound key in an EmbeddedDocument #1685 | ||||||
|  | - Fix bug where an EmbeddedDocument with the same id as its parent would not be tracked for changes #1768 | ||||||
|  | - Fix the fact that bulk `insert()` was not setting primary keys of inserted documents instances #1919 | ||||||
|  | - Fix bug when referencing the abstract class in a ReferenceField #1920 | ||||||
|  | - Allow modification to the document made in pre_save_post_validation to be taken into account #1202 | ||||||
|  | - Replaced MongoDB 2.4 tests in CI by MongoDB 3.2 #1903 | ||||||
|  | - Fix side effects of using queryset.`no_dereference` on other documents #1677 | ||||||
|  | - Fix TypeError when using lazy django translation objects as translated choices #1879 | ||||||
|  | - Improve 2-3 codebase compatibility #1889 | ||||||
|  | - Fix the support for changing the default value of ComplexDateTime #1368 | ||||||
|  | - Improves error message in case an EmbeddedDocumentListField receives an EmbeddedDocument instance | ||||||
|  |     instead of a list #1877 | ||||||
|  | - Fix the Decimal operator inc/dec #1517 #1320 | ||||||
|  | - Ignore killcursors queries in `query_counter` context manager #1869 | ||||||
|  | - Fix the fact that `query_counter` was modifying the initial profiling_level in case it was != 0 #1870 | ||||||
|  | - Repaired the `no_sub_classes` context manager + fix the fact that it was swallowing exceptions #1865 | ||||||
|  | - Fix index creation error that was swallowed by hasattr under python2 #1688 | ||||||
|  | - QuerySet limit function behaviour: Passing 0 as parameter will return all the documents in the cursor #1611 | ||||||
|  | - bulk insert updates the ids of the input documents instances #1919 | ||||||
|  | - Fix an harmless bug related to GenericReferenceField where modifications in the generic-referenced document | ||||||
|  |     were tracked in the parent #1934 | ||||||
|  | - Improve validator of BinaryField #273 | ||||||
|  | - Implemented lazy regex compiling in Field classes to improve 'import mongoengine' performance #1806 | ||||||
|  | - Updated GridFSProxy.__str__  so that it would always print both the filename and grid_id #710 | ||||||
|  | - Add __repr__ to Q and QCombination #1843 | ||||||
|  | - Fix bug in BaseList.__iter__ operator (was occurring when modifying a BaseList while iterating over it) #1676 | ||||||
|  | - Added field `DateField` #513 | ||||||
|  |  | ||||||
| Changes in 0.15.3 | Changes in 0.15.3 | ||||||
| ================= | ================= | ||||||
|  | -  BREAKING CHANGES: `Queryset.update/update_one` methods now returns an UpdateResult when `full_result=True` is provided and no longer a dict (relates to #1491) | ||||||
| -  Subfield resolve error in generic_emdedded_document query #1651 #1652 | -  Subfield resolve error in generic_emdedded_document query #1651 #1652 | ||||||
| -  use each modifier only with $position #1673 #1675 | -  use each modifier only with $position #1673 #1675 | ||||||
| -  Improve LazyReferenceField and GenericLazyReferenceField with nested fields #1704 | -  Improve LazyReferenceField and GenericLazyReferenceField with nested fields #1704 | ||||||
|   | |||||||
| @@ -45,27 +45,27 @@ post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' | |||||||
| post2.tags = ['mongoengine'] | post2.tags = ['mongoengine'] | ||||||
| post2.save() | post2.save() | ||||||
|  |  | ||||||
| print 'ALL POSTS' | print('ALL POSTS') | ||||||
| print | print() | ||||||
| for post in Post.objects: | for post in Post.objects: | ||||||
|     print post.title |     print(post.title) | ||||||
|     #print '=' * post.title.count() |     #print '=' * post.title.count() | ||||||
|     print "=" * 20 |     print("=" * 20) | ||||||
|  |  | ||||||
|     if isinstance(post, TextPost): |     if isinstance(post, TextPost): | ||||||
|         print post.content |         print(post.content) | ||||||
|  |  | ||||||
|     if isinstance(post, LinkPost): |     if isinstance(post, LinkPost): | ||||||
|         print 'Link:', post.link_url |         print('Link:', post.link_url) | ||||||
|  |  | ||||||
|     print |     print() | ||||||
| print | print() | ||||||
|  |  | ||||||
| print 'POSTS TAGGED \'MONGODB\'' | print('POSTS TAGGED \'MONGODB\'') | ||||||
| print | print() | ||||||
| for post in Post.objects(tags='mongodb'): | for post in Post.objects(tags='mongodb'): | ||||||
|     print post.title |     print(post.title) | ||||||
| print | print() | ||||||
|  |  | ||||||
| num_posts = Post.objects(tags='mongodb').count() | num_posts = Post.objects(tags='mongodb').count() | ||||||
| print 'Found %d posts with tag "mongodb"' % num_posts | print('Found %d posts with tag "mongodb"' % num_posts) | ||||||
|   | |||||||
| @@ -4,9 +4,11 @@ | |||||||
| Connecting to MongoDB | Connecting to MongoDB | ||||||
| ===================== | ===================== | ||||||
|  |  | ||||||
| To connect to a running instance of :program:`mongod`, use the | Connections in MongoEngine are registered globally and are identified with aliases. | ||||||
| :func:`~mongoengine.connect` function. The first argument is the name of the | If no `alias` is provided during the connection, it will use "default" as alias. | ||||||
| database to connect to:: |  | ||||||
|  | To connect to a running instance of :program:`mongod`, use the :func:`~mongoengine.connect` | ||||||
|  | function. The first argument is the name of the database to connect to:: | ||||||
|  |  | ||||||
|     from mongoengine import connect |     from mongoengine import connect | ||||||
|     connect('project1') |     connect('project1') | ||||||
| @@ -18,10 +20,10 @@ provide the :attr:`host` and :attr:`port` arguments to | |||||||
|  |  | ||||||
|     connect('project1', host='192.168.1.35', port=12345) |     connect('project1', host='192.168.1.35', port=12345) | ||||||
|  |  | ||||||
| If the database requires authentication, :attr:`username` and :attr:`password` | If the database requires authentication, :attr:`username`, :attr:`password` | ||||||
| arguments should be provided:: | and :attr:`authentication_source` arguments should be provided:: | ||||||
|  |  | ||||||
|     connect('project1', username='webapp', password='pwd123') |     connect('project1', username='webapp', password='pwd123', authentication_source='admin') | ||||||
|  |  | ||||||
| URI style connections are also supported -- just supply the URI as | URI style connections are also supported -- just supply the URI as | ||||||
| the :attr:`host` to | the :attr:`host` to | ||||||
| @@ -42,6 +44,9 @@ the :attr:`host` to | |||||||
|     will establish connection to ``production`` database using |     will establish connection to ``production`` database using | ||||||
|     ``admin`` username and ``qwerty`` password. |     ``admin`` username and ``qwerty`` password. | ||||||
|  |  | ||||||
|  | .. note:: Calling :func:`~mongoengine.connect` without argument will establish | ||||||
|  |     a connection to the "test" database by default | ||||||
|  |  | ||||||
| Replica Sets | Replica Sets | ||||||
| ============ | ============ | ||||||
|  |  | ||||||
| @@ -71,28 +76,61 @@ is used. | |||||||
| In the background this uses :func:`~mongoengine.register_connection` to | In the background this uses :func:`~mongoengine.register_connection` to | ||||||
| store the data and you can register all aliases up front if required. | store the data and you can register all aliases up front if required. | ||||||
|  |  | ||||||
| Individual documents can also support multiple databases by providing a | Documents defined in different database | ||||||
|  | --------------------------------------- | ||||||
|  | Individual documents can be attached to different databases by providing a | ||||||
| `db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` | `db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` | ||||||
| objects to point across databases and collections. Below is an example schema, | objects to point across databases and collections. Below is an example schema, | ||||||
| using 3 different databases to store data:: | using 3 different databases to store data:: | ||||||
|  |  | ||||||
|  |         connect(alias='user-db-alias', db='user-db') | ||||||
|  |         connect(alias='book-db-alias', db='book-db') | ||||||
|  |         connect(alias='users-books-db-alias', db='users-books-db') | ||||||
|  |          | ||||||
|         class User(Document): |         class User(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|             meta = {'db_alias': 'user-db'} |             meta = {'db_alias': 'user-db-alias'} | ||||||
|  |  | ||||||
|         class Book(Document): |         class Book(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|             meta = {'db_alias': 'book-db'} |             meta = {'db_alias': 'book-db-alias'} | ||||||
|  |  | ||||||
|         class AuthorBooks(Document): |         class AuthorBooks(Document): | ||||||
|             author = ReferenceField(User) |             author = ReferenceField(User) | ||||||
|             book = ReferenceField(Book) |             book = ReferenceField(Book) | ||||||
|  |  | ||||||
|             meta = {'db_alias': 'users-books-db'} |             meta = {'db_alias': 'users-books-db-alias'} | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Disconnecting an existing connection | ||||||
|  | ------------------------------------ | ||||||
|  | The function :func:`~mongoengine.disconnect` can be used to | ||||||
|  | disconnect a particular connection. This can be used to change a | ||||||
|  | connection globally:: | ||||||
|  |  | ||||||
|  |         from mongoengine import connect, disconnect | ||||||
|  |         connect('a_db', alias='db1') | ||||||
|  |  | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {'db_alias': 'db1'} | ||||||
|  |  | ||||||
|  |         disconnect(alias='db1') | ||||||
|  |  | ||||||
|  |         connect('another_db', alias='db1') | ||||||
|  |  | ||||||
|  | .. note:: Calling :func:`~mongoengine.disconnect` without argument | ||||||
|  |     will disconnect the "default" connection | ||||||
|  |  | ||||||
|  | .. note:: Since connections gets registered globally, it is important | ||||||
|  |     to use the `disconnect` function from MongoEngine and not the | ||||||
|  |     `disconnect()` method of an existing connection (pymongo.MongoClient) | ||||||
|  |  | ||||||
|  | .. note:: :class:`~mongoengine.Document` caches the pymongo collection. | ||||||
|  |     Using `disconnect` ensures that it gets cleaned up as well. | ||||||
|  |  | ||||||
| Context Managers | Context Managers | ||||||
| ================ | ================ | ||||||
| Sometimes you may want to switch the database or collection to query against. | Sometimes you may want to switch the database or collection to query against. | ||||||
| @@ -119,7 +157,7 @@ access to the same User document across databases:: | |||||||
|  |  | ||||||
| Switch Collection | Switch Collection | ||||||
| ----------------- | ----------------- | ||||||
| The :class:`~mongoengine.context_managers.switch_collection` context manager | The :func:`~mongoengine.context_managers.switch_collection` context manager | ||||||
| allows you to change the collection for a given class allowing quick and easy | allows you to change the collection for a given class allowing quick and easy | ||||||
| access to the same Group document across collection:: | access to the same Group document across collection:: | ||||||
|  |  | ||||||
|   | |||||||
| @@ -85,6 +85,7 @@ are as follows: | |||||||
| * :class:`~mongoengine.fields.ImageField` | * :class:`~mongoengine.fields.ImageField` | ||||||
| * :class:`~mongoengine.fields.IntField` | * :class:`~mongoengine.fields.IntField` | ||||||
| * :class:`~mongoengine.fields.ListField` | * :class:`~mongoengine.fields.ListField` | ||||||
|  | * :class:`~mongoengine.fields.LongField` | ||||||
| * :class:`~mongoengine.fields.MapField` | * :class:`~mongoengine.fields.MapField` | ||||||
| * :class:`~mongoengine.fields.ObjectIdField` | * :class:`~mongoengine.fields.ObjectIdField` | ||||||
| * :class:`~mongoengine.fields.ReferenceField` | * :class:`~mongoengine.fields.ReferenceField` | ||||||
| @@ -155,7 +156,7 @@ arguments can be set on all fields: | |||||||
|     An iterable (e.g. list, tuple or set) of choices to which the value of this |     An iterable (e.g. list, tuple or set) of choices to which the value of this | ||||||
|     field should be limited. |     field should be limited. | ||||||
|  |  | ||||||
|     Can be either be a nested tuples of value (stored in mongo) and a |     Can either be nested tuples of value (stored in mongo) and a | ||||||
|     human readable key :: |     human readable key :: | ||||||
|  |  | ||||||
|         SIZE = (('S', 'Small'), |         SIZE = (('S', 'Small'), | ||||||
| @@ -175,6 +176,21 @@ arguments can be set on all fields: | |||||||
|         class Shirt(Document): |         class Shirt(Document): | ||||||
|             size = StringField(max_length=3, choices=SIZE) |             size = StringField(max_length=3, choices=SIZE) | ||||||
|  |  | ||||||
|  | :attr:`validation` (Optional) | ||||||
|  |     A callable to validate the value of the field. | ||||||
|  |     The callable takes the value as parameter and should raise a ValidationError | ||||||
|  |     if validation fails | ||||||
|  |  | ||||||
|  |     e.g :: | ||||||
|  |  | ||||||
|  |         def _not_empty(val): | ||||||
|  |             if not val: | ||||||
|  |                 raise ValidationError('value can not be empty') | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField(validation=_not_empty) | ||||||
|  |  | ||||||
|  |  | ||||||
| :attr:`**kwargs` (Optional) | :attr:`**kwargs` (Optional) | ||||||
|     You can supply additional metadata as arbitrary additional keyword |     You can supply additional metadata as arbitrary additional keyword | ||||||
|     arguments.  You can not override existing attributes, however.  Common |     arguments.  You can not override existing attributes, however.  Common | ||||||
| @@ -492,7 +508,9 @@ the field name with a **#**:: | |||||||
|             ] |             ] | ||||||
|         } |         } | ||||||
|  |  | ||||||
| If a dictionary is passed then the following options are available: | If a dictionary is passed then additional options become available. Valid options include, | ||||||
|  | but are not limited to: | ||||||
|  |  | ||||||
|  |  | ||||||
| :attr:`fields` (Default: None) | :attr:`fields` (Default: None) | ||||||
|     The fields to index. Specified in the same format as described above. |     The fields to index. Specified in the same format as described above. | ||||||
| @@ -513,8 +531,15 @@ If a dictionary is passed then the following options are available: | |||||||
|     Allows you to automatically expire data from a collection by setting the |     Allows you to automatically expire data from a collection by setting the | ||||||
|     time in seconds to expire the a field. |     time in seconds to expire the a field. | ||||||
|  |  | ||||||
|  | :attr:`name` (Optional) | ||||||
|  |     Allows you to specify a name for the index | ||||||
|  |  | ||||||
|  | :attr:`collation` (Optional) | ||||||
|  |     Allows to create case insensitive indexes (MongoDB v3.4+ only) | ||||||
|  |  | ||||||
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|  |     Additional options are forwarded as **kwargs to pymongo's create_index method. | ||||||
|     Inheritance adds extra fields indices see: :ref:`document-inheritance`. |     Inheritance adds extra fields indices see: :ref:`document-inheritance`. | ||||||
|  |  | ||||||
| Global index default options | Global index default options | ||||||
| @@ -526,7 +551,7 @@ There are a few top level defaults for all indexes that can be set:: | |||||||
|         title = StringField() |         title = StringField() | ||||||
|         rating = StringField() |         rating = StringField() | ||||||
|         meta = { |         meta = { | ||||||
|             'index_options': {}, |             'index_opts': {}, | ||||||
|             'index_background': True, |             'index_background': True, | ||||||
|             'index_cls': False, |             'index_cls': False, | ||||||
|             'auto_create_index': True, |             'auto_create_index': True, | ||||||
| @@ -534,8 +559,8 @@ There are a few top level defaults for all indexes that can be set:: | |||||||
|         } |         } | ||||||
|  |  | ||||||
|  |  | ||||||
| :attr:`index_options` (Optional) | :attr:`index_opts` (Optional) | ||||||
|     Set any default index options - see the `full options list <http://docs.mongodb.org/manual/reference/method/db.collection.ensureIndex/#db.collection.ensureIndex>`_ |     Set any default index options - see the `full options list <https://docs.mongodb.com/manual/reference/method/db.collection.createIndex/#db.collection.createIndex>`_ | ||||||
|  |  | ||||||
| :attr:`index_background` (Optional) | :attr:`index_background` (Optional) | ||||||
|     Set the default value for if an index should be indexed in the background |     Set the default value for if an index should be indexed in the background | ||||||
| @@ -551,8 +576,7 @@ There are a few top level defaults for all indexes that can be set:: | |||||||
|  |  | ||||||
| :attr:`index_drop_dups` (Optional) | :attr:`index_drop_dups` (Optional) | ||||||
|     Set the default value for if an index should drop duplicates |     Set the default value for if an index should drop duplicates | ||||||
|  |     Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning | ||||||
| .. note:: Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning |  | ||||||
|     and has no effect |     and has no effect | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -734,6 +758,9 @@ document.:: | |||||||
| .. note:: From 0.8 onwards :attr:`allow_inheritance` defaults | .. note:: From 0.8 onwards :attr:`allow_inheritance` defaults | ||||||
|           to False, meaning you must set it to True to use inheritance. |           to False, meaning you must set it to True to use inheritance. | ||||||
|  |  | ||||||
|  |           Setting :attr:`allow_inheritance` to True should also be used in | ||||||
|  |           :class:`~mongoengine.EmbeddedDocument` class in case you need to subclass it | ||||||
|  |  | ||||||
| Working with existing data | Working with existing data | ||||||
| -------------------------- | -------------------------- | ||||||
| As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and | As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and | ||||||
|   | |||||||
| @@ -57,7 +57,8 @@ document values for example:: | |||||||
|  |  | ||||||
|         def clean(self): |         def clean(self): | ||||||
|             """Ensures that only published essays have a `pub_date` and |             """Ensures that only published essays have a `pub_date` and | ||||||
|             automatically sets the pub_date if published and not set""" |             automatically sets `pub_date` if essay is published and `pub_date` | ||||||
|  |             is not set""" | ||||||
|             if self.status == 'Draft' and self.pub_date is not None: |             if self.status == 'Draft' and self.pub_date is not None: | ||||||
|                 msg = 'Draft entries should not have a publication date.' |                 msg = 'Draft entries should not have a publication date.' | ||||||
|                 raise ValidationError(msg) |                 raise ValidationError(msg) | ||||||
|   | |||||||
| @@ -53,7 +53,8 @@ Deletion | |||||||
|  |  | ||||||
| Deleting stored files is achieved with the :func:`delete` method:: | Deleting stored files is achieved with the :func:`delete` method:: | ||||||
|  |  | ||||||
|     marmot.photo.delete() |     marmot.photo.delete()    # Deletes the GridFS document | ||||||
|  |     marmot.save()            # Saves the GridFS reference (being None) contained in the marmot instance | ||||||
|  |  | ||||||
| .. warning:: | .. warning:: | ||||||
|  |  | ||||||
| @@ -71,4 +72,5 @@ Files can be replaced with the :func:`replace` method. This works just like | |||||||
| the :func:`put` method so even metadata can (and should) be replaced:: | the :func:`put` method so even metadata can (and should) be replaced:: | ||||||
|  |  | ||||||
|     another_marmot = open('another_marmot.png', 'rb') |     another_marmot = open('another_marmot.png', 'rb') | ||||||
|     marmot.photo.replace(another_marmot, content_type='image/png') |     marmot.photo.replace(another_marmot, content_type='image/png')  # Replaces the GridFS document | ||||||
|  |     marmot.save()                                                   # Replaces the GridFS reference contained in marmot instance | ||||||
|   | |||||||
| @@ -19,3 +19,30 @@ or with an alias: | |||||||
|  |  | ||||||
|     connect('mongoenginetest', host='mongomock://localhost', alias='testdb') |     connect('mongoenginetest', host='mongomock://localhost', alias='testdb') | ||||||
|     conn = get_connection('testdb') |     conn = get_connection('testdb') | ||||||
|  |  | ||||||
|  | Example of test file: | ||||||
|  | -------- | ||||||
|  | .. code-block:: python | ||||||
|  |  | ||||||
|  |     import unittest | ||||||
|  |     from mongoengine import connect, disconnect | ||||||
|  |  | ||||||
|  |     class Person(Document): | ||||||
|  |         name = StringField() | ||||||
|  |  | ||||||
|  |     class TestPerson(unittest.TestCase): | ||||||
|  |  | ||||||
|  |         @classmethod | ||||||
|  |         def setUpClass(cls): | ||||||
|  |             connect('mongoenginetest', host='mongomock://localhost') | ||||||
|  |  | ||||||
|  |         @classmethod | ||||||
|  |         def tearDownClass(cls): | ||||||
|  |            disconnect() | ||||||
|  |  | ||||||
|  |         def test_thing(self): | ||||||
|  |             pers = Person(name='John') | ||||||
|  |             pers.save() | ||||||
|  |  | ||||||
|  |             fresh_pers = Person.objects().first() | ||||||
|  |             self.assertEqual(fresh_pers.name, 'John') | ||||||
|   | |||||||
| @@ -64,7 +64,7 @@ Available operators are as follows: | |||||||
| * ``gt`` -- greater than | * ``gt`` -- greater than | ||||||
| * ``gte`` -- greater than or equal to | * ``gte`` -- greater than or equal to | ||||||
| * ``not`` -- negate a standard check, may be used before other operators (e.g. | * ``not`` -- negate a standard check, may be used before other operators (e.g. | ||||||
|   ``Q(age__not__mod=5)``) |   ``Q(age__not__mod=(5, 0))``) | ||||||
| * ``in`` -- value is in list (a list of values should be provided) | * ``in`` -- value is in list (a list of values should be provided) | ||||||
| * ``nin`` -- value is not in list (a list of values should be provided) | * ``nin`` -- value is not in list (a list of values should be provided) | ||||||
| * ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values | * ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values | ||||||
| @@ -456,14 +456,14 @@ data. To turn off dereferencing of the results of a query use | |||||||
| :func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so:: | :func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so:: | ||||||
|  |  | ||||||
|     post = Post.objects.no_dereference().first() |     post = Post.objects.no_dereference().first() | ||||||
|     assert(isinstance(post.author, ObjectId)) |     assert(isinstance(post.author, DBRef)) | ||||||
|  |  | ||||||
| You can also turn off all dereferencing for a fixed period by using the | You can also turn off all dereferencing for a fixed period by using the | ||||||
| :class:`~mongoengine.context_managers.no_dereference` context manager:: | :class:`~mongoengine.context_managers.no_dereference` context manager:: | ||||||
|  |  | ||||||
|     with no_dereference(Post) as Post: |     with no_dereference(Post) as Post: | ||||||
|         post = Post.objects.first() |         post = Post.objects.first() | ||||||
|         assert(isinstance(post.author, ObjectId)) |         assert(isinstance(post.author, DBRef)) | ||||||
|  |  | ||||||
|     # Outside the context manager dereferencing occurs. |     # Outside the context manager dereferencing occurs. | ||||||
|     assert(isinstance(post.author, User)) |     assert(isinstance(post.author, User)) | ||||||
|   | |||||||
| @@ -113,6 +113,10 @@ handlers within your subclass:: | |||||||
|     signals.pre_save.connect(Author.pre_save, sender=Author) |     signals.pre_save.connect(Author.pre_save, sender=Author) | ||||||
|     signals.post_save.connect(Author.post_save, sender=Author) |     signals.post_save.connect(Author.post_save, sender=Author) | ||||||
|  |  | ||||||
|  | .. warning:: | ||||||
|  |  | ||||||
|  |     Note that EmbeddedDocument only supports pre/post_init signals. pre/post_save, etc should be attached to Document's class only. Attaching pre_save to an EmbeddedDocument is ignored silently. | ||||||
|  |  | ||||||
| Finally, you can also use this small decorator to quickly create a number of | Finally, you can also use this small decorator to quickly create a number of | ||||||
| signals and attach them to your :class:`~mongoengine.Document` or | signals and attach them to your :class:`~mongoengine.Document` or | ||||||
| :class:`~mongoengine.EmbeddedDocument` subclasses as class decorators:: | :class:`~mongoengine.EmbeddedDocument` subclasses as class decorators:: | ||||||
|   | |||||||
| @@ -6,6 +6,11 @@ Development | |||||||
| *********** | *********** | ||||||
| (Fill this out whenever you introduce breaking changes to MongoEngine) | (Fill this out whenever you introduce breaking changes to MongoEngine) | ||||||
|  |  | ||||||
|  | URLField's constructor no longer takes `verify_exists` | ||||||
|  |  | ||||||
|  | 0.15.0 | ||||||
|  | ****** | ||||||
|  |  | ||||||
| 0.14.0 | 0.14.0 | ||||||
| ****** | ****** | ||||||
| This release includes a few bug fixes and a significant code cleanup. The most | This release includes a few bug fixes and a significant code cleanup. The most | ||||||
|   | |||||||
| @@ -23,12 +23,13 @@ __all__ = (list(document.__all__) + list(fields.__all__) + | |||||||
|            list(signals.__all__) + list(errors.__all__)) |            list(signals.__all__) + list(errors.__all__)) | ||||||
|  |  | ||||||
|  |  | ||||||
| VERSION = (0, 15, 3) | VERSION = (0, 18, 1) | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_version(): | def get_version(): | ||||||
|     """Return the VERSION as a string, e.g. for VERSION == (0, 10, 7), |     """Return the VERSION as a string. | ||||||
|     return '0.10.7'. |  | ||||||
|  |     For example, if `VERSION == (0, 10, 7)`, return '0.10.7'. | ||||||
|     """ |     """ | ||||||
|     return '.'.join(map(str, VERSION)) |     return '.'.join(map(str, VERSION)) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -3,23 +3,23 @@ from mongoengine.errors import NotRegistered | |||||||
| __all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry') | __all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry') | ||||||
|  |  | ||||||
|  |  | ||||||
| UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'mul', | UPDATE_OPERATORS = {'set', 'unset', 'inc', 'dec', 'mul', | ||||||
|                         'pop', 'push', 'push_all', 'pull', |                     'pop', 'push', 'push_all', 'pull', | ||||||
|                         'pull_all', 'add_to_set', 'set_on_insert', |                     'pull_all', 'add_to_set', 'set_on_insert', | ||||||
|                         'min', 'max', 'rename']) |                     'min', 'max', 'rename'} | ||||||
|  |  | ||||||
|  |  | ||||||
| _document_registry = {} | _document_registry = {} | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_document(name): | def get_document(name): | ||||||
|     """Get a document class by name.""" |     """Get a registered Document class by name.""" | ||||||
|     doc = _document_registry.get(name, None) |     doc = _document_registry.get(name, None) | ||||||
|     if not doc: |     if not doc: | ||||||
|         # Possible old style name |         # Possible old style name | ||||||
|         single_end = name.split('.')[-1] |         single_end = name.split('.')[-1] | ||||||
|         compound_end = '.%s' % single_end |         compound_end = '.%s' % single_end | ||||||
|         possible_match = [k for k in _document_registry.keys() |         possible_match = [k for k in _document_registry | ||||||
|                           if k.endswith(compound_end) or k == single_end] |                           if k.endswith(compound_end) or k == single_end] | ||||||
|         if len(possible_match) == 1: |         if len(possible_match) == 1: | ||||||
|             doc = _document_registry.get(possible_match.pop(), None) |             doc = _document_registry.get(possible_match.pop(), None) | ||||||
| @@ -30,3 +30,12 @@ def get_document(name): | |||||||
|             been imported? |             been imported? | ||||||
|         """.strip() % name) |         """.strip() % name) | ||||||
|     return doc |     return doc | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def _get_documents_by_db(connection_alias, default_connection_alias): | ||||||
|  |     """Get all registered Documents class attached to a given database""" | ||||||
|  |     def get_doc_alias(doc_cls): | ||||||
|  |         return doc_cls._meta.get('db_alias', default_connection_alias) | ||||||
|  |  | ||||||
|  |     return [doc_cls for doc_cls in _document_registry.values() | ||||||
|  |             if get_doc_alias(doc_cls) == connection_alias] | ||||||
|   | |||||||
| @@ -1,13 +1,31 @@ | |||||||
| import itertools |  | ||||||
| import weakref | import weakref | ||||||
|  |  | ||||||
| from bson import DBRef | from bson import DBRef | ||||||
| import six | import six | ||||||
|  | from six import iteritems | ||||||
|  |  | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.errors import DoesNotExist, MultipleObjectsReturned | from mongoengine.errors import DoesNotExist, MultipleObjectsReturned | ||||||
|  |  | ||||||
| __all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference') | __all__ = ('BaseDict', 'StrictDict', 'BaseList', 'EmbeddedDocumentList', 'LazyReference') | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def mark_as_changed_wrapper(parent_method): | ||||||
|  |     """Decorators that ensures _mark_as_changed method gets called""" | ||||||
|  |     def wrapper(self, *args, **kwargs): | ||||||
|  |         result = parent_method(self, *args, **kwargs)   # Can't use super() in the decorator | ||||||
|  |         self._mark_as_changed() | ||||||
|  |         return result | ||||||
|  |     return wrapper | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def mark_key_as_changed_wrapper(parent_method): | ||||||
|  |     """Decorators that ensures _mark_as_changed method gets called with the key argument""" | ||||||
|  |     def wrapper(self, key, *args, **kwargs): | ||||||
|  |         result = parent_method(self, key, *args, **kwargs)   # Can't use super() in the decorator | ||||||
|  |         self._mark_as_changed(key) | ||||||
|  |         return result | ||||||
|  |     return wrapper | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseDict(dict): | class BaseDict(dict): | ||||||
| @@ -18,46 +36,36 @@ class BaseDict(dict): | |||||||
|     _name = None |     _name = None | ||||||
|  |  | ||||||
|     def __init__(self, dict_items, instance, name): |     def __init__(self, dict_items, instance, name): | ||||||
|         Document = _import_class('Document') |         BaseDocument = _import_class('BaseDocument') | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |  | ||||||
|  |  | ||||||
|         if isinstance(instance, (Document, EmbeddedDocument)): |         if isinstance(instance, BaseDocument): | ||||||
|             self._instance = weakref.proxy(instance) |             self._instance = weakref.proxy(instance) | ||||||
|         self._name = name |         self._name = name | ||||||
|         super(BaseDict, self).__init__(dict_items) |         super(BaseDict, self).__init__(dict_items) | ||||||
|  |  | ||||||
|     def __getitem__(self, key, *args, **kwargs): |     def get(self, key, default=None): | ||||||
|  |         # get does not use __getitem__ by default so we must override it as well | ||||||
|  |         try: | ||||||
|  |             return self.__getitem__(key) | ||||||
|  |         except KeyError: | ||||||
|  |             return default | ||||||
|  |  | ||||||
|  |     def __getitem__(self, key): | ||||||
|         value = super(BaseDict, self).__getitem__(key) |         value = super(BaseDict, self).__getitem__(key) | ||||||
|  |  | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: |         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif not isinstance(value, BaseDict) and isinstance(value, dict): |         elif isinstance(value, dict) and not isinstance(value, BaseDict): | ||||||
|             value = BaseDict(value, None, '%s.%s' % (self._name, key)) |             value = BaseDict(value, None, '%s.%s' % (self._name, key)) | ||||||
|             super(BaseDict, self).__setitem__(key, value) |             super(BaseDict, self).__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif not isinstance(value, BaseList) and isinstance(value, list): |         elif isinstance(value, list) and not isinstance(value, BaseList): | ||||||
|             value = BaseList(value, None, '%s.%s' % (self._name, key)) |             value = BaseList(value, None, '%s.%s' % (self._name, key)) | ||||||
|             super(BaseDict, self).__setitem__(key, value) |             super(BaseDict, self).__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def __setitem__(self, key, value, *args, **kwargs): |  | ||||||
|         self._mark_as_changed(key) |  | ||||||
|         return super(BaseDict, self).__setitem__(key, value) |  | ||||||
|  |  | ||||||
|     def __delete__(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseDict, self).__delete__(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def __delitem__(self, key, *args, **kwargs): |  | ||||||
|         self._mark_as_changed(key) |  | ||||||
|         return super(BaseDict, self).__delitem__(key) |  | ||||||
|  |  | ||||||
|     def __delattr__(self, key, *args, **kwargs): |  | ||||||
|         self._mark_as_changed(key) |  | ||||||
|         return super(BaseDict, self).__delattr__(key) |  | ||||||
|  |  | ||||||
|     def __getstate__(self): |     def __getstate__(self): | ||||||
|         self.instance = None |         self.instance = None | ||||||
|         self._dereferenced = False |         self._dereferenced = False | ||||||
| @@ -67,25 +75,14 @@ class BaseDict(dict): | |||||||
|         self = state |         self = state | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def clear(self, *args, **kwargs): |     __setitem__ = mark_key_as_changed_wrapper(dict.__setitem__) | ||||||
|         self._mark_as_changed() |     __delattr__ = mark_key_as_changed_wrapper(dict.__delattr__) | ||||||
|         return super(BaseDict, self).clear() |     __delitem__ = mark_key_as_changed_wrapper(dict.__delitem__) | ||||||
|  |     pop = mark_as_changed_wrapper(dict.pop) | ||||||
|     def pop(self, *args, **kwargs): |     clear = mark_as_changed_wrapper(dict.clear) | ||||||
|         self._mark_as_changed() |     update = mark_as_changed_wrapper(dict.update) | ||||||
|         return super(BaseDict, self).pop(*args, **kwargs) |     popitem = mark_as_changed_wrapper(dict.popitem) | ||||||
|  |     setdefault = mark_as_changed_wrapper(dict.setdefault) | ||||||
|     def popitem(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseDict, self).popitem() |  | ||||||
|  |  | ||||||
|     def setdefault(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseDict, self).setdefault(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def update(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseDict, self).update(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def _mark_as_changed(self, key=None): |     def _mark_as_changed(self, key=None): | ||||||
|         if hasattr(self._instance, '_mark_as_changed'): |         if hasattr(self._instance, '_mark_as_changed'): | ||||||
| @@ -103,52 +100,39 @@ class BaseList(list): | |||||||
|     _name = None |     _name = None | ||||||
|  |  | ||||||
|     def __init__(self, list_items, instance, name): |     def __init__(self, list_items, instance, name): | ||||||
|         Document = _import_class('Document') |         BaseDocument = _import_class('BaseDocument') | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |  | ||||||
|  |  | ||||||
|         if isinstance(instance, (Document, EmbeddedDocument)): |         if isinstance(instance, BaseDocument): | ||||||
|             self._instance = weakref.proxy(instance) |             self._instance = weakref.proxy(instance) | ||||||
|         self._name = name |         self._name = name | ||||||
|         super(BaseList, self).__init__(list_items) |         super(BaseList, self).__init__(list_items) | ||||||
|  |  | ||||||
|     def __getitem__(self, key, *args, **kwargs): |     def __getitem__(self, key): | ||||||
|         value = super(BaseList, self).__getitem__(key) |         value = super(BaseList, self).__getitem__(key) | ||||||
|  |  | ||||||
|  |         if isinstance(key, slice): | ||||||
|  |             # When receiving a slice operator, we don't convert the structure and bind | ||||||
|  |             # to parent's instance. This is buggy for now but would require more work to be handled properly | ||||||
|  |             return value | ||||||
|  |  | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: |         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif not isinstance(value, BaseDict) and isinstance(value, dict): |         elif isinstance(value, dict) and not isinstance(value, BaseDict): | ||||||
|  |             # Replace dict by BaseDict | ||||||
|             value = BaseDict(value, None, '%s.%s' % (self._name, key)) |             value = BaseDict(value, None, '%s.%s' % (self._name, key)) | ||||||
|             super(BaseList, self).__setitem__(key, value) |             super(BaseList, self).__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         elif not isinstance(value, BaseList) and isinstance(value, list): |         elif isinstance(value, list) and not isinstance(value, BaseList): | ||||||
|  |             # Replace list by BaseList | ||||||
|             value = BaseList(value, None, '%s.%s' % (self._name, key)) |             value = BaseList(value, None, '%s.%s' % (self._name, key)) | ||||||
|             super(BaseList, self).__setitem__(key, value) |             super(BaseList, self).__setitem__(key, value) | ||||||
|             value._instance = self._instance |             value._instance = self._instance | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def __iter__(self): |     def __iter__(self): | ||||||
|         for i in six.moves.range(self.__len__()): |         for v in super(BaseList, self).__iter__(): | ||||||
|             yield self[i] |             yield v | ||||||
|  |  | ||||||
|     def __setitem__(self, key, value, *args, **kwargs): |  | ||||||
|         if isinstance(key, slice): |  | ||||||
|             self._mark_as_changed() |  | ||||||
|         else: |  | ||||||
|             self._mark_as_changed(key) |  | ||||||
|         return super(BaseList, self).__setitem__(key, value) |  | ||||||
|  |  | ||||||
|     def __delitem__(self, key, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseList, self).__delitem__(key) |  | ||||||
|  |  | ||||||
|     def __setslice__(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseList, self).__setslice__(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def __delslice__(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseList, self).__delslice__(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def __getstate__(self): |     def __getstate__(self): | ||||||
|         self.instance = None |         self.instance = None | ||||||
| @@ -159,41 +143,40 @@ class BaseList(list): | |||||||
|         self = state |         self = state | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def __iadd__(self, other): |     def __setitem__(self, key, value): | ||||||
|         self._mark_as_changed() |         changed_key = key | ||||||
|         return super(BaseList, self).__iadd__(other) |         if isinstance(key, slice): | ||||||
|  |             # In case of slice, we don't bother to identify the exact elements being updated | ||||||
|  |             # instead, we simply marks the whole list as changed | ||||||
|  |             changed_key = None | ||||||
|  |  | ||||||
|     def __imul__(self, other): |         result = super(BaseList, self).__setitem__(key, value) | ||||||
|         self._mark_as_changed() |         self._mark_as_changed(changed_key) | ||||||
|         return super(BaseList, self).__imul__(other) |         return result | ||||||
|  |  | ||||||
|     def append(self, *args, **kwargs): |     append = mark_as_changed_wrapper(list.append) | ||||||
|         self._mark_as_changed() |     extend = mark_as_changed_wrapper(list.extend) | ||||||
|         return super(BaseList, self).append(*args, **kwargs) |     insert = mark_as_changed_wrapper(list.insert) | ||||||
|  |     pop = mark_as_changed_wrapper(list.pop) | ||||||
|  |     remove = mark_as_changed_wrapper(list.remove) | ||||||
|  |     reverse = mark_as_changed_wrapper(list.reverse) | ||||||
|  |     sort = mark_as_changed_wrapper(list.sort) | ||||||
|  |     __delitem__ = mark_as_changed_wrapper(list.__delitem__) | ||||||
|  |     __iadd__ = mark_as_changed_wrapper(list.__iadd__) | ||||||
|  |     __imul__ = mark_as_changed_wrapper(list.__imul__) | ||||||
|  |  | ||||||
|     def extend(self, *args, **kwargs): |     if six.PY2: | ||||||
|         self._mark_as_changed() |         # Under py3 __setslice__, __delslice__ and __getslice__ | ||||||
|         return super(BaseList, self).extend(*args, **kwargs) |         # are replaced by __setitem__, __delitem__ and __getitem__ with a slice as parameter | ||||||
|  |         # so we mimic this under python 2 | ||||||
|  |         def __setslice__(self, i, j, sequence): | ||||||
|  |             return self.__setitem__(slice(i, j), sequence) | ||||||
|  |  | ||||||
|     def insert(self, *args, **kwargs): |         def __delslice__(self, i, j): | ||||||
|         self._mark_as_changed() |             return self.__delitem__(slice(i, j)) | ||||||
|         return super(BaseList, self).insert(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def pop(self, *args, **kwargs): |         def __getslice__(self, i, j): | ||||||
|         self._mark_as_changed() |             return self.__getitem__(slice(i, j)) | ||||||
|         return super(BaseList, self).pop(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def remove(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseList, self).remove(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def reverse(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseList, self).reverse() |  | ||||||
|  |  | ||||||
|     def sort(self, *args, **kwargs): |  | ||||||
|         self._mark_as_changed() |  | ||||||
|         return super(BaseList, self).sort(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def _mark_as_changed(self, key=None): |     def _mark_as_changed(self, key=None): | ||||||
|         if hasattr(self._instance, '_mark_as_changed'): |         if hasattr(self._instance, '_mark_as_changed'): | ||||||
| @@ -207,6 +190,10 @@ class BaseList(list): | |||||||
|  |  | ||||||
| class EmbeddedDocumentList(BaseList): | class EmbeddedDocumentList(BaseList): | ||||||
|  |  | ||||||
|  |     def __init__(self, list_items, instance, name): | ||||||
|  |         super(EmbeddedDocumentList, self).__init__(list_items, instance, name) | ||||||
|  |         self._instance = instance | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def __match_all(cls, embedded_doc, kwargs): |     def __match_all(cls, embedded_doc, kwargs): | ||||||
|         """Return True if a given embedded doc matches all the filter |         """Return True if a given embedded doc matches all the filter | ||||||
| @@ -225,15 +212,14 @@ class EmbeddedDocumentList(BaseList): | |||||||
|             return embedded_docs |             return embedded_docs | ||||||
|         return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)] |         return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)] | ||||||
|  |  | ||||||
|     def __init__(self, list_items, instance, name): |  | ||||||
|         super(EmbeddedDocumentList, self).__init__(list_items, instance, name) |  | ||||||
|         self._instance = instance |  | ||||||
|  |  | ||||||
|     def filter(self, **kwargs): |     def filter(self, **kwargs): | ||||||
|         """ |         """ | ||||||
|         Filters the list by only including embedded documents with the |         Filters the list by only including embedded documents with the | ||||||
|         given keyword arguments. |         given keyword arguments. | ||||||
|  |  | ||||||
|  |         This method only supports simple comparison (e.g: .filter(name='John Doe')) | ||||||
|  |         and does not support operators like __gte, __lte, __icontains like queryset.filter does | ||||||
|  |  | ||||||
|         :param kwargs: The keyword arguments corresponding to the fields to |         :param kwargs: The keyword arguments corresponding to the fields to | ||||||
|          filter on. *Multiple arguments are treated as if they are ANDed |          filter on. *Multiple arguments are treated as if they are ANDed | ||||||
|          together.* |          together.* | ||||||
| @@ -374,11 +360,11 @@ class EmbeddedDocumentList(BaseList): | |||||||
|  |  | ||||||
| class StrictDict(object): | class StrictDict(object): | ||||||
|     __slots__ = () |     __slots__ = () | ||||||
|     _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create']) |     _special_fields = {'get', 'pop', 'iteritems', 'items', 'keys', 'create'} | ||||||
|     _classes = {} |     _classes = {} | ||||||
|  |  | ||||||
|     def __init__(self, **kwargs): |     def __init__(self, **kwargs): | ||||||
|         for k, v in kwargs.iteritems(): |         for k, v in iteritems(kwargs): | ||||||
|             setattr(self, k, v) |             setattr(self, k, v) | ||||||
|  |  | ||||||
|     def __getitem__(self, key): |     def __getitem__(self, key): | ||||||
| @@ -426,7 +412,7 @@ class StrictDict(object): | |||||||
|         return (key for key in self.__slots__ if hasattr(self, key)) |         return (key for key in self.__slots__ if hasattr(self, key)) | ||||||
|  |  | ||||||
|     def __len__(self): |     def __len__(self): | ||||||
|         return len(list(self.iteritems())) |         return len(list(iteritems(self))) | ||||||
|  |  | ||||||
|     def __eq__(self, other): |     def __eq__(self, other): | ||||||
|         return self.items() == other.items() |         return self.items() == other.items() | ||||||
|   | |||||||
| @@ -1,13 +1,11 @@ | |||||||
| import copy | import copy | ||||||
| import numbers | import numbers | ||||||
| from collections import Hashable |  | ||||||
| from functools import partial | from functools import partial | ||||||
|  |  | ||||||
| from bson import ObjectId, json_util | from bson import DBRef, ObjectId, SON, json_util | ||||||
| from bson.dbref import DBRef |  | ||||||
| from bson.son import SON |  | ||||||
| import pymongo | import pymongo | ||||||
| import six | import six | ||||||
|  | from six import iteritems | ||||||
|  |  | ||||||
| from mongoengine import signals | from mongoengine import signals | ||||||
| from mongoengine.base.common import get_document | from mongoengine.base.common import get_document | ||||||
| @@ -19,6 +17,7 @@ from mongoengine.base.fields import ComplexBaseField | |||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError, | from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError, | ||||||
|                                 LookUpError, OperationError, ValidationError) |                                 LookUpError, OperationError, ValidationError) | ||||||
|  | from mongoengine.python_support import Hashable | ||||||
|  |  | ||||||
| __all__ = ('BaseDocument', 'NON_FIELD_ERRORS') | __all__ = ('BaseDocument', 'NON_FIELD_ERRORS') | ||||||
|  |  | ||||||
| @@ -26,6 +25,16 @@ NON_FIELD_ERRORS = '__all__' | |||||||
|  |  | ||||||
|  |  | ||||||
| class BaseDocument(object): | class BaseDocument(object): | ||||||
|  |     # TODO simplify how `_changed_fields` is used. | ||||||
|  |     # Currently, handling of `_changed_fields` seems unnecessarily convoluted: | ||||||
|  |     # 1. `BaseDocument` defines `_changed_fields` in its `__slots__`, yet it's | ||||||
|  |     #    not setting it to `[]` (or any other value) in `__init__`. | ||||||
|  |     # 2. `EmbeddedDocument` sets `_changed_fields` to `[]` it its overloaded | ||||||
|  |     #    `__init__`. | ||||||
|  |     # 3. `Document` does NOT set `_changed_fields` upon initialization. The | ||||||
|  |     #    field is primarily set via `_from_son` or `_clear_changed_fields`, | ||||||
|  |     #    though there are also other methods that manipulate it. | ||||||
|  |     # 4. The codebase is littered with `hasattr` calls for `_changed_fields`. | ||||||
|     __slots__ = ('_changed_fields', '_initialised', '_created', '_data', |     __slots__ = ('_changed_fields', '_initialised', '_created', '_data', | ||||||
|                  '_dynamic_fields', '_auto_id_field', '_db_field_map', |                  '_dynamic_fields', '_auto_id_field', '_db_field_map', | ||||||
|                  '__weakref__') |                  '__weakref__') | ||||||
| @@ -36,13 +45,20 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|     def __init__(self, *args, **values): |     def __init__(self, *args, **values): | ||||||
|         """ |         """ | ||||||
|         Initialise a document or embedded document |         Initialise a document or an embedded document. | ||||||
|  |  | ||||||
|         :param __auto_convert: Try and will cast python objects to Object types |         :param dict values: A dictionary of keys and values for the document. | ||||||
|         :param values: A dictionary of values for the document |             It may contain additional reserved keywords, e.g. "__auto_convert". | ||||||
|  |         :param bool __auto_convert: If True, supplied values will be converted | ||||||
|  |             to Python-type values via each field's `to_python` method. | ||||||
|  |         :param set __only_fields: A set of fields that have been loaded for | ||||||
|  |             this document. Empty if all fields have been loaded. | ||||||
|  |         :param bool _created: Indicates whether this is a brand new document | ||||||
|  |             or whether it's already been persisted before. Defaults to true. | ||||||
|         """ |         """ | ||||||
|         self._initialised = False |         self._initialised = False | ||||||
|         self._created = True |         self._created = True | ||||||
|  |  | ||||||
|         if args: |         if args: | ||||||
|             # Combine positional arguments with named arguments. |             # Combine positional arguments with named arguments. | ||||||
|             # We only want named arguments. |             # We only want named arguments. | ||||||
| @@ -59,7 +75,6 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|         __auto_convert = values.pop('__auto_convert', True) |         __auto_convert = values.pop('__auto_convert', True) | ||||||
|  |  | ||||||
|         # 399: set default values only to fields loaded from DB |  | ||||||
|         __only_fields = set(values.pop('__only_fields', values)) |         __only_fields = set(values.pop('__only_fields', values)) | ||||||
|  |  | ||||||
|         _created = values.pop('_created', True) |         _created = values.pop('_created', True) | ||||||
| @@ -84,8 +99,10 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|         self._dynamic_fields = SON() |         self._dynamic_fields = SON() | ||||||
|  |  | ||||||
|         # Assign default values to instance |         # Assign default values to the instance. | ||||||
|         for key, field in self._fields.iteritems(): |         # We set default values only for fields loaded from DB. See | ||||||
|  |         # https://github.com/mongoengine/mongoengine/issues/399 for more info. | ||||||
|  |         for key, field in iteritems(self._fields): | ||||||
|             if self._db_field_map.get(key, key) in __only_fields: |             if self._db_field_map.get(key, key) in __only_fields: | ||||||
|                 continue |                 continue | ||||||
|             value = getattr(self, key, None) |             value = getattr(self, key, None) | ||||||
| @@ -97,16 +114,14 @@ class BaseDocument(object): | |||||||
|         # Set passed values after initialisation |         # Set passed values after initialisation | ||||||
|         if self._dynamic: |         if self._dynamic: | ||||||
|             dynamic_data = {} |             dynamic_data = {} | ||||||
|             for key, value in values.iteritems(): |             for key, value in iteritems(values): | ||||||
|                 if key in self._fields or key == '_id': |                 if key in self._fields or key == '_id': | ||||||
|                     setattr(self, key, value) |                     setattr(self, key, value) | ||||||
|                 elif self._dynamic: |                 else: | ||||||
|                     dynamic_data[key] = value |                     dynamic_data[key] = value | ||||||
|         else: |         else: | ||||||
|             FileField = _import_class('FileField') |             FileField = _import_class('FileField') | ||||||
|             for key, value in values.iteritems(): |             for key, value in iteritems(values): | ||||||
|                 if key == '__auto_convert': |  | ||||||
|                     continue |  | ||||||
|                 key = self._reverse_db_field_map.get(key, key) |                 key = self._reverse_db_field_map.get(key, key) | ||||||
|                 if key in self._fields or key in ('id', 'pk', '_cls'): |                 if key in self._fields or key in ('id', 'pk', '_cls'): | ||||||
|                     if __auto_convert and value is not None: |                     if __auto_convert and value is not None: | ||||||
| @@ -122,12 +137,13 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|         if self._dynamic: |         if self._dynamic: | ||||||
|             self._dynamic_lock = False |             self._dynamic_lock = False | ||||||
|             for key, value in dynamic_data.iteritems(): |             for key, value in iteritems(dynamic_data): | ||||||
|                 setattr(self, key, value) |                 setattr(self, key, value) | ||||||
|  |  | ||||||
|         # Flag initialised |         # Flag initialised | ||||||
|         self._initialised = True |         self._initialised = True | ||||||
|         self._created = _created |         self._created = _created | ||||||
|  |  | ||||||
|         signals.post_init.send(self.__class__, document=self) |         signals.post_init.send(self.__class__, document=self) | ||||||
|  |  | ||||||
|     def __delattr__(self, *args, **kwargs): |     def __delattr__(self, *args, **kwargs): | ||||||
| @@ -296,15 +312,14 @@ class BaseDocument(object): | |||||||
|         """ |         """ | ||||||
|         Return as SON data ready for use with MongoDB. |         Return as SON data ready for use with MongoDB. | ||||||
|         """ |         """ | ||||||
|         if not fields: |         fields = fields or [] | ||||||
|             fields = [] |  | ||||||
|  |  | ||||||
|         data = SON() |         data = SON() | ||||||
|         data['_id'] = None |         data['_id'] = None | ||||||
|         data['_cls'] = self._class_name |         data['_cls'] = self._class_name | ||||||
|  |  | ||||||
|         # only root fields ['test1.a', 'test2'] => ['test1', 'test2'] |         # only root fields ['test1.a', 'test2'] => ['test1', 'test2'] | ||||||
|         root_fields = set([f.split('.')[0] for f in fields]) |         root_fields = {f.split('.')[0] for f in fields} | ||||||
|  |  | ||||||
|         for field_name in self: |         for field_name in self: | ||||||
|             if root_fields and field_name not in root_fields: |             if root_fields and field_name not in root_fields: | ||||||
| @@ -352,6 +367,9 @@ class BaseDocument(object): | |||||||
|     def validate(self, clean=True): |     def validate(self, clean=True): | ||||||
|         """Ensure that all fields' values are valid and that required fields |         """Ensure that all fields' values are valid and that required fields | ||||||
|         are present. |         are present. | ||||||
|  |  | ||||||
|  |         Raises :class:`ValidationError` if any of the fields' values are found | ||||||
|  |         to be invalid. | ||||||
|         """ |         """ | ||||||
|         # Ensure that each field is matched to a valid value |         # Ensure that each field is matched to a valid value | ||||||
|         errors = {} |         errors = {} | ||||||
| @@ -406,7 +424,15 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def from_json(cls, json_data, created=False): |     def from_json(cls, json_data, created=False): | ||||||
|         """Converts json data to an unsaved document instance""" |         """Converts json data to a Document instance | ||||||
|  |  | ||||||
|  |         :param json_data: The json data to load into the Document | ||||||
|  |         :param created: If True, the document will be considered as a brand new document | ||||||
|  |                         If False and an id is provided, it will consider that the data being | ||||||
|  |                         loaded corresponds to what's already in the database (This has an impact of subsequent call to .save()) | ||||||
|  |                         If False and no id is provided, it will consider the data as a new document | ||||||
|  |                         (default ``False``) | ||||||
|  |         """ | ||||||
|         return cls._from_son(json_util.loads(json_data), created=created) |         return cls._from_son(json_util.loads(json_data), created=created) | ||||||
|  |  | ||||||
|     def __expand_dynamic_values(self, name, value): |     def __expand_dynamic_values(self, name, value): | ||||||
| @@ -497,76 +523,74 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|         self._changed_fields = [] |         self._changed_fields = [] | ||||||
|  |  | ||||||
|     def _nestable_types_changed_fields(self, changed_fields, key, data, inspected): |     def _nestable_types_changed_fields(self, changed_fields, base_key, data): | ||||||
|  |         """Inspect nested data for changed fields | ||||||
|  |  | ||||||
|  |         :param changed_fields: Previously collected changed fields | ||||||
|  |         :param base_key: The base key that must be used to prepend changes to this data | ||||||
|  |         :param data: data to inspect for changes | ||||||
|  |         """ | ||||||
|         # Loop list / dict fields as they contain documents |         # Loop list / dict fields as they contain documents | ||||||
|         # Determine the iterator to use |         # Determine the iterator to use | ||||||
|         if not hasattr(data, 'items'): |         if not hasattr(data, 'items'): | ||||||
|             iterator = enumerate(data) |             iterator = enumerate(data) | ||||||
|         else: |         else: | ||||||
|             iterator = data.iteritems() |             iterator = iteritems(data) | ||||||
|  |  | ||||||
|         for index, value in iterator: |         for index_or_key, value in iterator: | ||||||
|             list_key = '%s%s.' % (key, index) |             item_key = '%s%s.' % (base_key, index_or_key) | ||||||
|             # don't check anything lower if this key is already marked |             # don't check anything lower if this key is already marked | ||||||
|             # as changed. |             # as changed. | ||||||
|             if list_key[:-1] in changed_fields: |             if item_key[:-1] in changed_fields: | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             if hasattr(value, '_get_changed_fields'): |             if hasattr(value, '_get_changed_fields'): | ||||||
|                 changed = value._get_changed_fields(inspected) |                 changed = value._get_changed_fields() | ||||||
|                 changed_fields += ['%s%s' % (list_key, k) |                 changed_fields += ['%s%s' % (item_key, k) for k in changed if k] | ||||||
|                                    for k in changed if k] |  | ||||||
|             elif isinstance(value, (list, tuple, dict)): |             elif isinstance(value, (list, tuple, dict)): | ||||||
|                 self._nestable_types_changed_fields( |                 self._nestable_types_changed_fields( | ||||||
|                     changed_fields, list_key, value, inspected) |                     changed_fields, item_key, value) | ||||||
|  |  | ||||||
|     def _get_changed_fields(self, inspected=None): |     def _get_changed_fields(self): | ||||||
|         """Return a list of all fields that have explicitly been changed. |         """Return a list of all fields that have explicitly been changed. | ||||||
|         """ |         """ | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||||
|         DynamicEmbeddedDocument = _import_class('DynamicEmbeddedDocument') |  | ||||||
|         ReferenceField = _import_class('ReferenceField') |         ReferenceField = _import_class('ReferenceField') | ||||||
|  |         GenericReferenceField = _import_class('GenericReferenceField') | ||||||
|         SortedListField = _import_class('SortedListField') |         SortedListField = _import_class('SortedListField') | ||||||
|  |  | ||||||
|         changed_fields = [] |         changed_fields = [] | ||||||
|         changed_fields += getattr(self, '_changed_fields', []) |         changed_fields += getattr(self, '_changed_fields', []) | ||||||
|  |  | ||||||
|         inspected = inspected or set() |  | ||||||
|         if hasattr(self, 'id') and isinstance(self.id, Hashable): |  | ||||||
|             if self.id in inspected: |  | ||||||
|                 return changed_fields |  | ||||||
|             inspected.add(self.id) |  | ||||||
|  |  | ||||||
|         for field_name in self._fields_ordered: |         for field_name in self._fields_ordered: | ||||||
|             db_field_name = self._db_field_map.get(field_name, field_name) |             db_field_name = self._db_field_map.get(field_name, field_name) | ||||||
|             key = '%s.' % db_field_name |             key = '%s.' % db_field_name | ||||||
|             data = self._data.get(field_name, None) |             data = self._data.get(field_name, None) | ||||||
|             field = self._fields.get(field_name) |             field = self._fields.get(field_name) | ||||||
|  |  | ||||||
|             if hasattr(data, 'id'): |             if db_field_name in changed_fields: | ||||||
|                 if data.id in inspected: |                 # Whole field already marked as changed, no need to go further | ||||||
|                     continue |  | ||||||
|             if isinstance(field, ReferenceField): |  | ||||||
|                 continue |                 continue | ||||||
|             elif ( |  | ||||||
|                 isinstance(data, (EmbeddedDocument, DynamicEmbeddedDocument)) and |             if isinstance(field, ReferenceField):   # Don't follow referenced documents | ||||||
|                 db_field_name not in changed_fields |                 continue | ||||||
|             ): |  | ||||||
|  |             if isinstance(data, EmbeddedDocument): | ||||||
|                 # Find all embedded fields that have been changed |                 # Find all embedded fields that have been changed | ||||||
|                 changed = data._get_changed_fields(inspected) |                 changed = data._get_changed_fields() | ||||||
|                 changed_fields += ['%s%s' % (key, k) for k in changed if k] |                 changed_fields += ['%s%s' % (key, k) for k in changed if k] | ||||||
|             elif (isinstance(data, (list, tuple, dict)) and |             elif isinstance(data, (list, tuple, dict)): | ||||||
|                     db_field_name not in changed_fields): |  | ||||||
|                 if (hasattr(field, 'field') and |                 if (hasattr(field, 'field') and | ||||||
|                         isinstance(field.field, ReferenceField)): |                         isinstance(field.field, (ReferenceField, GenericReferenceField))): | ||||||
|                     continue |                     continue | ||||||
|                 elif isinstance(field, SortedListField) and field._ordering: |                 elif isinstance(field, SortedListField) and field._ordering: | ||||||
|                     # if ordering is affected whole list is changed |                     # if ordering is affected whole list is changed | ||||||
|                     if any(map(lambda d: field._ordering in d._changed_fields, data)): |                     if any(field._ordering in d._changed_fields for d in data): | ||||||
|                         changed_fields.append(db_field_name) |                         changed_fields.append(db_field_name) | ||||||
|                         continue |                         continue | ||||||
|  |  | ||||||
|                 self._nestable_types_changed_fields( |                 self._nestable_types_changed_fields( | ||||||
|                     changed_fields, key, data, inspected) |                     changed_fields, key, data) | ||||||
|         return changed_fields |         return changed_fields | ||||||
|  |  | ||||||
|     def _delta(self): |     def _delta(self): | ||||||
| @@ -578,7 +602,6 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|         set_fields = self._get_changed_fields() |         set_fields = self._get_changed_fields() | ||||||
|         unset_data = {} |         unset_data = {} | ||||||
|         parts = [] |  | ||||||
|         if hasattr(self, '_changed_fields'): |         if hasattr(self, '_changed_fields'): | ||||||
|             set_data = {} |             set_data = {} | ||||||
|             # Fetch each set item from its path |             # Fetch each set item from its path | ||||||
| @@ -588,15 +611,13 @@ class BaseDocument(object): | |||||||
|                 new_path = [] |                 new_path = [] | ||||||
|                 for p in parts: |                 for p in parts: | ||||||
|                     if isinstance(d, (ObjectId, DBRef)): |                     if isinstance(d, (ObjectId, DBRef)): | ||||||
|  |                         # Don't dig in the references | ||||||
|                         break |                         break | ||||||
|                     elif isinstance(d, list) and p.lstrip('-').isdigit(): |                     elif isinstance(d, list) and p.isdigit(): | ||||||
|                         if p[0] == '-': |                         # An item of a list (identified by its index) is updated | ||||||
|                             p = str(len(d) + int(p)) |                         d = d[int(p)] | ||||||
|                         try: |  | ||||||
|                             d = d[int(p)] |  | ||||||
|                         except IndexError: |  | ||||||
|                             d = None |  | ||||||
|                     elif hasattr(d, 'get'): |                     elif hasattr(d, 'get'): | ||||||
|  |                         # dict-like (dict, embedded document) | ||||||
|                         d = d.get(p) |                         d = d.get(p) | ||||||
|                     new_path.append(p) |                     new_path.append(p) | ||||||
|                 path = '.'.join(new_path) |                 path = '.'.join(new_path) | ||||||
| @@ -608,26 +629,26 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|         # Determine if any changed items were actually unset. |         # Determine if any changed items were actually unset. | ||||||
|         for path, value in set_data.items(): |         for path, value in set_data.items(): | ||||||
|             if value or isinstance(value, (numbers.Number, bool)): |             if value or isinstance(value, (numbers.Number, bool)):  # Account for 0 and True that are truthy | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             # If we've set a value that ain't the default value don't unset it. |             parts = path.split('.') | ||||||
|             default = None |  | ||||||
|             if (self._dynamic and len(parts) and parts[0] in |             if (self._dynamic and len(parts) and parts[0] in | ||||||
|                     self._dynamic_fields): |                     self._dynamic_fields): | ||||||
|                 del set_data[path] |                 del set_data[path] | ||||||
|                 unset_data[path] = 1 |                 unset_data[path] = 1 | ||||||
|                 continue |                 continue | ||||||
|             elif path in self._fields: |  | ||||||
|  |             # If we've set a value that ain't the default value don't unset it. | ||||||
|  |             default = None | ||||||
|  |             if path in self._fields: | ||||||
|                 default = self._fields[path].default |                 default = self._fields[path].default | ||||||
|             else:  # Perform a full lookup for lists / embedded lookups |             else:  # Perform a full lookup for lists / embedded lookups | ||||||
|                 d = self |                 d = self | ||||||
|                 parts = path.split('.') |  | ||||||
|                 db_field_name = parts.pop() |                 db_field_name = parts.pop() | ||||||
|                 for p in parts: |                 for p in parts: | ||||||
|                     if isinstance(d, list) and p.lstrip('-').isdigit(): |                     if isinstance(d, list) and p.isdigit(): | ||||||
|                         if p[0] == '-': |  | ||||||
|                             p = str(len(d) + int(p)) |  | ||||||
|                         d = d[int(p)] |                         d = d[int(p)] | ||||||
|                     elif (hasattr(d, '__getattribute__') and |                     elif (hasattr(d, '__getattribute__') and | ||||||
|                           not isinstance(d, dict)): |                           not isinstance(d, dict)): | ||||||
| @@ -645,10 +666,9 @@ class BaseDocument(object): | |||||||
|                         default = None |                         default = None | ||||||
|  |  | ||||||
|             if default is not None: |             if default is not None: | ||||||
|                 if callable(default): |                 default = default() if callable(default) else default | ||||||
|                     default = default() |  | ||||||
|  |  | ||||||
|             if default != value: |             if value != default: | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             del set_data[path] |             del set_data[path] | ||||||
| @@ -664,9 +684,7 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False): |     def _from_son(cls, son, _auto_dereference=True, only_fields=None, created=False): | ||||||
|         """Create an instance of a Document (subclass) from a PyMongo |         """Create an instance of a Document (subclass) from a PyMongo SON.""" | ||||||
|         SON. |  | ||||||
|         """ |  | ||||||
|         if not only_fields: |         if not only_fields: | ||||||
|             only_fields = [] |             only_fields = [] | ||||||
|  |  | ||||||
| @@ -680,7 +698,7 @@ class BaseDocument(object): | |||||||
|         # Convert SON to a data dict, making sure each key is a string and |         # Convert SON to a data dict, making sure each key is a string and | ||||||
|         # corresponds to the right db field. |         # corresponds to the right db field. | ||||||
|         data = {} |         data = {} | ||||||
|         for key, value in son.iteritems(): |         for key, value in iteritems(son): | ||||||
|             key = str(key) |             key = str(key) | ||||||
|             key = cls._db_field_map.get(key, key) |             key = cls._db_field_map.get(key, key) | ||||||
|             data[key] = value |             data[key] = value | ||||||
| @@ -689,14 +707,13 @@ class BaseDocument(object): | |||||||
|         if class_name != cls._class_name: |         if class_name != cls._class_name: | ||||||
|             cls = get_document(class_name) |             cls = get_document(class_name) | ||||||
|  |  | ||||||
|         changed_fields = [] |  | ||||||
|         errors_dict = {} |         errors_dict = {} | ||||||
|  |  | ||||||
|         fields = cls._fields |         fields = cls._fields | ||||||
|         if not _auto_dereference: |         if not _auto_dereference: | ||||||
|             fields = copy.copy(fields) |             fields = copy.deepcopy(fields) | ||||||
|  |  | ||||||
|         for field_name, field in fields.iteritems(): |         for field_name, field in iteritems(fields): | ||||||
|             field._auto_dereference = _auto_dereference |             field._auto_dereference = _auto_dereference | ||||||
|             if field.db_field in data: |             if field.db_field in data: | ||||||
|                 value = data[field.db_field] |                 value = data[field.db_field] | ||||||
| @@ -717,10 +734,15 @@ class BaseDocument(object): | |||||||
|  |  | ||||||
|         # In STRICT documents, remove any keys that aren't in cls._fields |         # In STRICT documents, remove any keys that aren't in cls._fields | ||||||
|         if cls.STRICT: |         if cls.STRICT: | ||||||
|             data = {k: v for k, v in data.iteritems() if k in cls._fields} |             data = {k: v for k, v in iteritems(data) if k in cls._fields} | ||||||
|  |  | ||||||
|         obj = cls(__auto_convert=False, _created=created, __only_fields=only_fields, **data) |         obj = cls( | ||||||
|         obj._changed_fields = changed_fields |             __auto_convert=False, | ||||||
|  |             _created=created, | ||||||
|  |             __only_fields=only_fields, | ||||||
|  |             **data | ||||||
|  |         ) | ||||||
|  |         obj._changed_fields = [] | ||||||
|         if not _auto_dereference: |         if not _auto_dereference: | ||||||
|             obj._fields = fields |             obj._fields = fields | ||||||
|  |  | ||||||
| @@ -884,7 +906,8 @@ class BaseDocument(object): | |||||||
|                 index = {'fields': fields, 'unique': True, 'sparse': sparse} |                 index = {'fields': fields, 'unique': True, 'sparse': sparse} | ||||||
|                 unique_indexes.append(index) |                 unique_indexes.append(index) | ||||||
|  |  | ||||||
|             if field.__class__.__name__ == 'ListField': |             if field.__class__.__name__ in {'EmbeddedDocumentListField', | ||||||
|  |                                             'ListField', 'SortedListField'}: | ||||||
|                 field = field.field |                 field = field.field | ||||||
|  |  | ||||||
|             # Grab any embedded document field unique indexes |             # Grab any embedded document field unique indexes | ||||||
| @@ -1085,6 +1108,6 @@ class BaseDocument(object): | |||||||
|             sep = getattr(field, 'display_sep', ' ') |             sep = getattr(field, 'display_sep', ' ') | ||||||
|             values = value if field.__class__.__name__ in ('ListField', 'SortedListField') else [value] |             values = value if field.__class__.__name__ in ('ListField', 'SortedListField') else [value] | ||||||
|             return sep.join([ |             return sep.join([ | ||||||
|                 dict(field.choices).get(val, val) |                 six.text_type(dict(field.choices).get(val, val)) | ||||||
|                 for val in values or []]) |                 for val in values or []]) | ||||||
|         return value |         return value | ||||||
|   | |||||||
| @@ -5,13 +5,13 @@ import weakref | |||||||
| from bson import DBRef, ObjectId, SON | from bson import DBRef, ObjectId, SON | ||||||
| import pymongo | import pymongo | ||||||
| import six | import six | ||||||
|  | from six import iteritems | ||||||
|  |  | ||||||
| from mongoengine.base.common import UPDATE_OPERATORS | from mongoengine.base.common import UPDATE_OPERATORS | ||||||
| from mongoengine.base.datastructures import (BaseDict, BaseList, | from mongoengine.base.datastructures import (BaseDict, BaseList, | ||||||
|                                              EmbeddedDocumentList) |                                              EmbeddedDocumentList) | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.errors import ValidationError | from mongoengine.errors import DeprecatedError, ValidationError | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField', | __all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField', | ||||||
|            'GeoJsonBaseField') |            'GeoJsonBaseField') | ||||||
| @@ -52,10 +52,10 @@ class BaseField(object): | |||||||
|             unique with. |             unique with. | ||||||
|         :param primary_key: Mark this field as the primary key. Defaults to False. |         :param primary_key: Mark this field as the primary key. Defaults to False. | ||||||
|         :param validation: (optional) A callable to validate the value of the |         :param validation: (optional) A callable to validate the value of the | ||||||
|             field.  Generally this is deprecated in favour of the |             field.  The callable takes the value as parameter and should raise | ||||||
|             `FIELD.validate` method |             a ValidationError if validation fails | ||||||
|         :param choices: (optional) The valid choices |         :param choices: (optional) The valid choices | ||||||
|         :param null: (optional) Is the field value can be null. If no and there is a default value |         :param null: (optional) If the field value can be null. If no and there is a default value | ||||||
|             then the default value is set |             then the default value is set | ||||||
|         :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False` |         :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False` | ||||||
|             means that uniqueness won't be enforced for `None` values |             means that uniqueness won't be enforced for `None` values | ||||||
| @@ -130,7 +130,6 @@ class BaseField(object): | |||||||
|     def __set__(self, instance, value): |     def __set__(self, instance, value): | ||||||
|         """Descriptor for assigning a value to a field in a document. |         """Descriptor for assigning a value to a field in a document. | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         # If setting to None and there is a default |         # If setting to None and there is a default | ||||||
|         # Then set the value to the default value |         # Then set the value to the default value | ||||||
|         if value is None: |         if value is None: | ||||||
| @@ -226,10 +225,18 @@ class BaseField(object): | |||||||
|         # check validation argument |         # check validation argument | ||||||
|         if self.validation is not None: |         if self.validation is not None: | ||||||
|             if callable(self.validation): |             if callable(self.validation): | ||||||
|                 if not self.validation(value): |                 try: | ||||||
|                     self.error('Value does not match custom validation method') |                     # breaking change of 0.18 | ||||||
|  |                     # Get rid of True/False-type return for the validation method | ||||||
|  |                     # in favor of having validation raising a ValidationError | ||||||
|  |                     ret = self.validation(value) | ||||||
|  |                     if ret is not None: | ||||||
|  |                         raise DeprecatedError('validation argument for `%s` must not return anything, ' | ||||||
|  |                                               'it should raise a ValidationError if validation fails' % self.name) | ||||||
|  |                 except ValidationError as ex: | ||||||
|  |                     self.error(str(ex)) | ||||||
|             else: |             else: | ||||||
|                 raise ValueError('validation argument for "%s" must be a ' |                 raise ValueError('validation argument for `"%s"` must be a ' | ||||||
|                                  'callable.' % self.name) |                                  'callable.' % self.name) | ||||||
|  |  | ||||||
|         self.validate(value, **kwargs) |         self.validate(value, **kwargs) | ||||||
| @@ -267,18 +274,25 @@ class ComplexBaseField(BaseField): | |||||||
|         ReferenceField = _import_class('ReferenceField') |         ReferenceField = _import_class('ReferenceField') | ||||||
|         GenericReferenceField = _import_class('GenericReferenceField') |         GenericReferenceField = _import_class('GenericReferenceField') | ||||||
|         EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') |         EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') | ||||||
|         dereference = (self._auto_dereference and |  | ||||||
|  |         auto_dereference = instance._fields[self.name]._auto_dereference | ||||||
|  |  | ||||||
|  |         dereference = (auto_dereference and | ||||||
|                        (self.field is None or isinstance(self.field, |                        (self.field is None or isinstance(self.field, | ||||||
|                                                          (GenericReferenceField, ReferenceField)))) |                                                          (GenericReferenceField, ReferenceField)))) | ||||||
|  |  | ||||||
|         _dereference = _import_class('DeReference')() |         _dereference = _import_class('DeReference')() | ||||||
|  |  | ||||||
|         self._auto_dereference = instance._fields[self.name]._auto_dereference |         if (instance._initialised and | ||||||
|         if instance._initialised and dereference and instance._data.get(self.name): |                 dereference and | ||||||
|  |                 instance._data.get(self.name) and | ||||||
|  |                 not getattr(instance._data[self.name], '_dereferenced', False)): | ||||||
|             instance._data[self.name] = _dereference( |             instance._data[self.name] = _dereference( | ||||||
|                 instance._data.get(self.name), max_depth=1, instance=instance, |                 instance._data.get(self.name), max_depth=1, instance=instance, | ||||||
|                 name=self.name |                 name=self.name | ||||||
|             ) |             ) | ||||||
|  |             if hasattr(instance._data[self.name], '_dereferenced'): | ||||||
|  |                 instance._data[self.name]._dereferenced = True | ||||||
|  |  | ||||||
|         value = super(ComplexBaseField, self).__get__(instance, owner) |         value = super(ComplexBaseField, self).__get__(instance, owner) | ||||||
|  |  | ||||||
| @@ -294,7 +308,7 @@ class ComplexBaseField(BaseField): | |||||||
|             value = BaseDict(value, instance, self.name) |             value = BaseDict(value, instance, self.name) | ||||||
|             instance._data[self.name] = value |             instance._data[self.name] = value | ||||||
|  |  | ||||||
|         if (self._auto_dereference and instance._initialised and |         if (auto_dereference and instance._initialised and | ||||||
|                 isinstance(value, (BaseList, BaseDict)) and |                 isinstance(value, (BaseList, BaseDict)) and | ||||||
|                 not value._dereferenced): |                 not value._dereferenced): | ||||||
|             value = _dereference( |             value = _dereference( | ||||||
| @@ -313,11 +327,16 @@ class ComplexBaseField(BaseField): | |||||||
|         if hasattr(value, 'to_python'): |         if hasattr(value, 'to_python'): | ||||||
|             return value.to_python() |             return value.to_python() | ||||||
|  |  | ||||||
|  |         BaseDocument = _import_class('BaseDocument') | ||||||
|  |         if isinstance(value, BaseDocument): | ||||||
|  |             # Something is wrong, return the value as it is | ||||||
|  |             return value | ||||||
|  |  | ||||||
|         is_list = False |         is_list = False | ||||||
|         if not hasattr(value, 'items'): |         if not hasattr(value, 'items'): | ||||||
|             try: |             try: | ||||||
|                 is_list = True |                 is_list = True | ||||||
|                 value = {k: v for k, v in enumerate(value)} |                 value = {idx: v for idx, v in enumerate(value)} | ||||||
|             except TypeError:  # Not iterable return the value |             except TypeError:  # Not iterable return the value | ||||||
|                 return value |                 return value | ||||||
|  |  | ||||||
| @@ -376,11 +395,11 @@ class ComplexBaseField(BaseField): | |||||||
|         if self.field: |         if self.field: | ||||||
|             value_dict = { |             value_dict = { | ||||||
|                 key: self.field._to_mongo_safe_call(item, use_db_field, fields) |                 key: self.field._to_mongo_safe_call(item, use_db_field, fields) | ||||||
|                 for key, item in value.iteritems() |                 for key, item in iteritems(value) | ||||||
|             } |             } | ||||||
|         else: |         else: | ||||||
|             value_dict = {} |             value_dict = {} | ||||||
|             for k, v in value.iteritems(): |             for k, v in iteritems(value): | ||||||
|                 if isinstance(v, Document): |                 if isinstance(v, Document): | ||||||
|                     # We need the id from the saved object to create the DBRef |                     # We need the id from the saved object to create the DBRef | ||||||
|                     if v.pk is None: |                     if v.pk is None: | ||||||
| @@ -417,7 +436,7 @@ class ComplexBaseField(BaseField): | |||||||
|         errors = {} |         errors = {} | ||||||
|         if self.field: |         if self.field: | ||||||
|             if hasattr(value, 'iteritems') or hasattr(value, 'items'): |             if hasattr(value, 'iteritems') or hasattr(value, 'items'): | ||||||
|                 sequence = value.iteritems() |                 sequence = iteritems(value) | ||||||
|             else: |             else: | ||||||
|                 sequence = enumerate(value) |                 sequence = enumerate(value) | ||||||
|             for k, v in sequence: |             for k, v in sequence: | ||||||
| @@ -502,7 +521,7 @@ class GeoJsonBaseField(BaseField): | |||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         """Validate the GeoJson object based on its type.""" |         """Validate the GeoJson object based on its type.""" | ||||||
|         if isinstance(value, dict): |         if isinstance(value, dict): | ||||||
|             if set(value.keys()) == set(['type', 'coordinates']): |             if set(value.keys()) == {'type', 'coordinates'}: | ||||||
|                 if value['type'] != self._type: |                 if value['type'] != self._type: | ||||||
|                     self.error('%s type must be "%s"' % |                     self.error('%s type must be "%s"' % | ||||||
|                                (self._name, self._type)) |                                (self._name, self._type)) | ||||||
|   | |||||||
| @@ -1,6 +1,7 @@ | |||||||
| import warnings | import warnings | ||||||
|  |  | ||||||
| import six | import six | ||||||
|  | from six import iteritems, itervalues | ||||||
|  |  | ||||||
| from mongoengine.base.common import _document_registry | from mongoengine.base.common import _document_registry | ||||||
| from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField | from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField | ||||||
| @@ -18,14 +19,14 @@ class DocumentMetaclass(type): | |||||||
|     """Metaclass for all documents.""" |     """Metaclass for all documents.""" | ||||||
|  |  | ||||||
|     # TODO lower complexity of this method |     # TODO lower complexity of this method | ||||||
|     def __new__(cls, name, bases, attrs): |     def __new__(mcs, name, bases, attrs): | ||||||
|         flattened_bases = cls._get_bases(bases) |         flattened_bases = mcs._get_bases(bases) | ||||||
|         super_new = super(DocumentMetaclass, cls).__new__ |         super_new = super(DocumentMetaclass, mcs).__new__ | ||||||
|  |  | ||||||
|         # If a base class just call super |         # If a base class just call super | ||||||
|         metaclass = attrs.get('my_metaclass') |         metaclass = attrs.get('my_metaclass') | ||||||
|         if metaclass and issubclass(metaclass, DocumentMetaclass): |         if metaclass and issubclass(metaclass, DocumentMetaclass): | ||||||
|             return super_new(cls, name, bases, attrs) |             return super_new(mcs, name, bases, attrs) | ||||||
|  |  | ||||||
|         attrs['_is_document'] = attrs.get('_is_document', False) |         attrs['_is_document'] = attrs.get('_is_document', False) | ||||||
|         attrs['_cached_reference_fields'] = [] |         attrs['_cached_reference_fields'] = [] | ||||||
| @@ -62,7 +63,7 @@ class DocumentMetaclass(type): | |||||||
|             # Standard object mixin - merge in any Fields |             # Standard object mixin - merge in any Fields | ||||||
|             if not hasattr(base, '_meta'): |             if not hasattr(base, '_meta'): | ||||||
|                 base_fields = {} |                 base_fields = {} | ||||||
|                 for attr_name, attr_value in base.__dict__.iteritems(): |                 for attr_name, attr_value in iteritems(base.__dict__): | ||||||
|                     if not isinstance(attr_value, BaseField): |                     if not isinstance(attr_value, BaseField): | ||||||
|                         continue |                         continue | ||||||
|                     attr_value.name = attr_name |                     attr_value.name = attr_name | ||||||
| @@ -74,7 +75,7 @@ class DocumentMetaclass(type): | |||||||
|  |  | ||||||
|         # Discover any document fields |         # Discover any document fields | ||||||
|         field_names = {} |         field_names = {} | ||||||
|         for attr_name, attr_value in attrs.iteritems(): |         for attr_name, attr_value in iteritems(attrs): | ||||||
|             if not isinstance(attr_value, BaseField): |             if not isinstance(attr_value, BaseField): | ||||||
|                 continue |                 continue | ||||||
|             attr_value.name = attr_name |             attr_value.name = attr_name | ||||||
| @@ -103,7 +104,7 @@ class DocumentMetaclass(type): | |||||||
|  |  | ||||||
|         attrs['_fields_ordered'] = tuple(i[1] for i in sorted( |         attrs['_fields_ordered'] = tuple(i[1] for i in sorted( | ||||||
|                                          (v.creation_counter, v.name) |                                          (v.creation_counter, v.name) | ||||||
|                                          for v in doc_fields.itervalues())) |                                          for v in itervalues(doc_fields))) | ||||||
|  |  | ||||||
|         # |         # | ||||||
|         # Set document hierarchy |         # Set document hierarchy | ||||||
| @@ -121,7 +122,8 @@ class DocumentMetaclass(type): | |||||||
|                 # inheritance of classes where inheritance is set to False |                 # inheritance of classes where inheritance is set to False | ||||||
|                 allow_inheritance = base._meta.get('allow_inheritance') |                 allow_inheritance = base._meta.get('allow_inheritance') | ||||||
|                 if not allow_inheritance and not base._meta.get('abstract'): |                 if not allow_inheritance and not base._meta.get('abstract'): | ||||||
|                     raise ValueError('Document %s may not be subclassed' % |                     raise ValueError('Document %s may not be subclassed. ' | ||||||
|  |                                      'To enable inheritance, use the "allow_inheritance" meta attribute.' % | ||||||
|                                      base.__name__) |                                      base.__name__) | ||||||
|  |  | ||||||
|         # Get superclasses from last base superclass |         # Get superclasses from last base superclass | ||||||
| @@ -138,7 +140,7 @@ class DocumentMetaclass(type): | |||||||
|         attrs['_types'] = attrs['_subclasses']  # TODO depreciate _types |         attrs['_types'] = attrs['_subclasses']  # TODO depreciate _types | ||||||
|  |  | ||||||
|         # Create the new_class |         # Create the new_class | ||||||
|         new_class = super_new(cls, name, bases, attrs) |         new_class = super_new(mcs, name, bases, attrs) | ||||||
|  |  | ||||||
|         # Set _subclasses |         # Set _subclasses | ||||||
|         for base in document_bases: |         for base in document_bases: | ||||||
| @@ -147,7 +149,7 @@ class DocumentMetaclass(type): | |||||||
|             base._types = base._subclasses  # TODO depreciate _types |             base._types = base._subclasses  # TODO depreciate _types | ||||||
|  |  | ||||||
|         (Document, EmbeddedDocument, DictField, |         (Document, EmbeddedDocument, DictField, | ||||||
|          CachedReferenceField) = cls._import_classes() |          CachedReferenceField) = mcs._import_classes() | ||||||
|  |  | ||||||
|         if issubclass(new_class, Document): |         if issubclass(new_class, Document): | ||||||
|             new_class._collection = None |             new_class._collection = None | ||||||
| @@ -172,7 +174,7 @@ class DocumentMetaclass(type): | |||||||
|                         f.__dict__.update({'im_self': getattr(f, '__self__')}) |                         f.__dict__.update({'im_self': getattr(f, '__self__')}) | ||||||
|  |  | ||||||
|         # Handle delete rules |         # Handle delete rules | ||||||
|         for field in new_class._fields.itervalues(): |         for field in itervalues(new_class._fields): | ||||||
|             f = field |             f = field | ||||||
|             if f.owner_document is None: |             if f.owner_document is None: | ||||||
|                 f.owner_document = new_class |                 f.owner_document = new_class | ||||||
| @@ -182,9 +184,6 @@ class DocumentMetaclass(type): | |||||||
|                 if issubclass(new_class, EmbeddedDocument): |                 if issubclass(new_class, EmbeddedDocument): | ||||||
|                     raise InvalidDocumentError('CachedReferenceFields is not ' |                     raise InvalidDocumentError('CachedReferenceFields is not ' | ||||||
|                                                'allowed in EmbeddedDocuments') |                                                'allowed in EmbeddedDocuments') | ||||||
|                 if not f.document_type: |  | ||||||
|                     raise InvalidDocumentError( |  | ||||||
|                         'Document is not available to sync') |  | ||||||
|  |  | ||||||
|                 if f.auto_sync: |                 if f.auto_sync: | ||||||
|                     f.start_listener() |                     f.start_listener() | ||||||
| @@ -219,29 +218,26 @@ class DocumentMetaclass(type): | |||||||
|  |  | ||||||
|         return new_class |         return new_class | ||||||
|  |  | ||||||
|     def add_to_class(self, name, value): |  | ||||||
|         setattr(self, name, value) |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _get_bases(cls, bases): |     def _get_bases(mcs, bases): | ||||||
|         if isinstance(bases, BasesTuple): |         if isinstance(bases, BasesTuple): | ||||||
|             return bases |             return bases | ||||||
|         seen = [] |         seen = [] | ||||||
|         bases = cls.__get_bases(bases) |         bases = mcs.__get_bases(bases) | ||||||
|         unique_bases = (b for b in bases if not (b in seen or seen.append(b))) |         unique_bases = (b for b in bases if not (b in seen or seen.append(b))) | ||||||
|         return BasesTuple(unique_bases) |         return BasesTuple(unique_bases) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def __get_bases(cls, bases): |     def __get_bases(mcs, bases): | ||||||
|         for base in bases: |         for base in bases: | ||||||
|             if base is object: |             if base is object: | ||||||
|                 continue |                 continue | ||||||
|             yield base |             yield base | ||||||
|             for child_base in cls.__get_bases(base.__bases__): |             for child_base in mcs.__get_bases(base.__bases__): | ||||||
|                 yield child_base |                 yield child_base | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _import_classes(cls): |     def _import_classes(mcs): | ||||||
|         Document = _import_class('Document') |         Document = _import_class('Document') | ||||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') |         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||||
|         DictField = _import_class('DictField') |         DictField = _import_class('DictField') | ||||||
| @@ -254,9 +250,9 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|     collection in the database. |     collection in the database. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def __new__(cls, name, bases, attrs): |     def __new__(mcs, name, bases, attrs): | ||||||
|         flattened_bases = cls._get_bases(bases) |         flattened_bases = mcs._get_bases(bases) | ||||||
|         super_new = super(TopLevelDocumentMetaclass, cls).__new__ |         super_new = super(TopLevelDocumentMetaclass, mcs).__new__ | ||||||
|  |  | ||||||
|         # Set default _meta data if base class, otherwise get user defined meta |         # Set default _meta data if base class, otherwise get user defined meta | ||||||
|         if attrs.get('my_metaclass') == TopLevelDocumentMetaclass: |         if attrs.get('my_metaclass') == TopLevelDocumentMetaclass: | ||||||
| @@ -319,7 +315,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|                     not parent_doc_cls._meta.get('abstract', False)): |                     not parent_doc_cls._meta.get('abstract', False)): | ||||||
|                 msg = 'Abstract document cannot have non-abstract base' |                 msg = 'Abstract document cannot have non-abstract base' | ||||||
|                 raise ValueError(msg) |                 raise ValueError(msg) | ||||||
|             return super_new(cls, name, bases, attrs) |             return super_new(mcs, name, bases, attrs) | ||||||
|  |  | ||||||
|         # Merge base class metas. |         # Merge base class metas. | ||||||
|         # Uses a special MetaDict that handles various merging rules |         # Uses a special MetaDict that handles various merging rules | ||||||
| @@ -360,7 +356,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|         attrs['_meta'] = meta |         attrs['_meta'] = meta | ||||||
|  |  | ||||||
|         # Call super and get the new class |         # Call super and get the new class | ||||||
|         new_class = super_new(cls, name, bases, attrs) |         new_class = super_new(mcs, name, bases, attrs) | ||||||
|  |  | ||||||
|         meta = new_class._meta |         meta = new_class._meta | ||||||
|  |  | ||||||
| @@ -377,7 +373,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|             new_class.objects = QuerySetManager() |             new_class.objects = QuerySetManager() | ||||||
|  |  | ||||||
|         # Validate the fields and set primary key if needed |         # Validate the fields and set primary key if needed | ||||||
|         for field_name, field in new_class._fields.iteritems(): |         for field_name, field in iteritems(new_class._fields): | ||||||
|             if field.primary_key: |             if field.primary_key: | ||||||
|                 # Ensure only one primary key is set |                 # Ensure only one primary key is set | ||||||
|                 current_pk = new_class._meta.get('id_field') |                 current_pk = new_class._meta.get('id_field') | ||||||
| @@ -394,7 +390,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|                                            '_auto_id_field', False) |                                            '_auto_id_field', False) | ||||||
|         if not new_class._meta.get('id_field'): |         if not new_class._meta.get('id_field'): | ||||||
|             # After 0.10, find not existing names, instead of overwriting |             # After 0.10, find not existing names, instead of overwriting | ||||||
|             id_name, id_db_name = cls.get_auto_id_names(new_class) |             id_name, id_db_name = mcs.get_auto_id_names(new_class) | ||||||
|             new_class._auto_id_field = True |             new_class._auto_id_field = True | ||||||
|             new_class._meta['id_field'] = id_name |             new_class._meta['id_field'] = id_name | ||||||
|             new_class._fields[id_name] = ObjectIdField(db_field=id_db_name) |             new_class._fields[id_name] = ObjectIdField(db_field=id_db_name) | ||||||
| @@ -419,7 +415,7 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | |||||||
|         return new_class |         return new_class | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def get_auto_id_names(cls, new_class): |     def get_auto_id_names(mcs, new_class): | ||||||
|         id_name, id_db_name = ('id', '_id') |         id_name, id_db_name = ('id', '_id') | ||||||
|         if id_name not in new_class._fields and \ |         if id_name not in new_class._fields and \ | ||||||
|                 id_db_name not in (v.db_field for v in new_class._fields.values()): |                 id_db_name not in (v.db_field for v in new_class._fields.values()): | ||||||
| @@ -440,7 +436,7 @@ class MetaDict(dict): | |||||||
|     _merge_options = ('indexes',) |     _merge_options = ('indexes',) | ||||||
|  |  | ||||||
|     def merge(self, new_options): |     def merge(self, new_options): | ||||||
|         for k, v in new_options.iteritems(): |         for k, v in iteritems(new_options): | ||||||
|             if k in self._merge_options: |             if k in self._merge_options: | ||||||
|                 self[k] = self.get(k, []) + v |                 self[k] = self.get(k, []) + v | ||||||
|             else: |             else: | ||||||
|   | |||||||
							
								
								
									
										22
									
								
								mongoengine/base/utils.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										22
									
								
								mongoengine/base/utils.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,22 @@ | |||||||
|  | import re | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class LazyRegexCompiler(object): | ||||||
|  |     """Descriptor to allow lazy compilation of regex""" | ||||||
|  |  | ||||||
|  |     def __init__(self, pattern, flags=0): | ||||||
|  |         self._pattern = pattern | ||||||
|  |         self._flags = flags | ||||||
|  |         self._compiled_regex = None | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def compiled_regex(self): | ||||||
|  |         if self._compiled_regex is None: | ||||||
|  |             self._compiled_regex = re.compile(self._pattern, self._flags) | ||||||
|  |         return self._compiled_regex | ||||||
|  |  | ||||||
|  |     def __get__(self, instance, owner): | ||||||
|  |         return self.compiled_regex | ||||||
|  |  | ||||||
|  |     def __set__(self, instance, value): | ||||||
|  |         raise AttributeError("Can not set attribute LazyRegexCompiler") | ||||||
| @@ -31,7 +31,6 @@ def _import_class(cls_name): | |||||||
|  |  | ||||||
|     field_classes = _field_list_cache |     field_classes = _field_list_cache | ||||||
|  |  | ||||||
|     queryset_classes = ('OperationError',) |  | ||||||
|     deref_classes = ('DeReference',) |     deref_classes = ('DeReference',) | ||||||
|  |  | ||||||
|     if cls_name == 'BaseDocument': |     if cls_name == 'BaseDocument': | ||||||
| @@ -43,14 +42,11 @@ def _import_class(cls_name): | |||||||
|     elif cls_name in field_classes: |     elif cls_name in field_classes: | ||||||
|         from mongoengine import fields as module |         from mongoengine import fields as module | ||||||
|         import_classes = field_classes |         import_classes = field_classes | ||||||
|     elif cls_name in queryset_classes: |  | ||||||
|         from mongoengine import queryset as module |  | ||||||
|         import_classes = queryset_classes |  | ||||||
|     elif cls_name in deref_classes: |     elif cls_name in deref_classes: | ||||||
|         from mongoengine import dereference as module |         from mongoengine import dereference as module | ||||||
|         import_classes = deref_classes |         import_classes = deref_classes | ||||||
|     else: |     else: | ||||||
|         raise ValueError('No import set for: ' % cls_name) |         raise ValueError('No import set for: %s' % cls_name) | ||||||
|  |  | ||||||
|     for cls in import_classes: |     for cls in import_classes: | ||||||
|         _class_registry_cache[cls] = getattr(module, cls) |         _class_registry_cache[cls] = getattr(module, cls) | ||||||
|   | |||||||
| @@ -1,19 +1,30 @@ | |||||||
| from pymongo import MongoClient, ReadPreference, uri_parser | from pymongo import MongoClient, ReadPreference, uri_parser | ||||||
|  | from pymongo.database import _check_name | ||||||
| import six | import six | ||||||
|  |  | ||||||
| from mongoengine.python_support import IS_PYMONGO_3 | __all__ = [ | ||||||
|  |     'DEFAULT_CONNECTION_NAME', | ||||||
| __all__ = ['MongoEngineConnectionError', 'connect', 'register_connection', |     'DEFAULT_DATABASE_NAME', | ||||||
|            'DEFAULT_CONNECTION_NAME'] |     'MongoEngineConnectionError', | ||||||
|  |     'connect', | ||||||
|  |     'disconnect', | ||||||
|  |     'disconnect_all', | ||||||
|  |     'get_connection', | ||||||
|  |     'get_db', | ||||||
|  |     'register_connection', | ||||||
|  | ] | ||||||
|  |  | ||||||
|  |  | ||||||
| DEFAULT_CONNECTION_NAME = 'default' | DEFAULT_CONNECTION_NAME = 'default' | ||||||
|  | DEFAULT_DATABASE_NAME = 'test' | ||||||
|  | DEFAULT_HOST = 'localhost' | ||||||
|  | DEFAULT_PORT = 27017 | ||||||
|  |  | ||||||
| if IS_PYMONGO_3: | _connection_settings = {} | ||||||
|     READ_PREFERENCE = ReadPreference.PRIMARY | _connections = {} | ||||||
| else: | _dbs = {} | ||||||
|     from pymongo import MongoReplicaSetClient |  | ||||||
|     READ_PREFERENCE = False | READ_PREFERENCE = ReadPreference.PRIMARY | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoEngineConnectionError(Exception): | class MongoEngineConnectionError(Exception): | ||||||
| @@ -23,45 +34,48 @@ class MongoEngineConnectionError(Exception): | |||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| _connection_settings = {} | def _check_db_name(name): | ||||||
| _connections = {} |     """Check if a database name is valid. | ||||||
| _dbs = {} |     This functionality is copied from pymongo Database class constructor. | ||||||
|  |     """ | ||||||
|  |     if not isinstance(name, six.string_types): | ||||||
|  |         raise TypeError('name must be an instance of %s' % six.string_types) | ||||||
|  |     elif name != '$external': | ||||||
|  |         _check_name(name) | ||||||
|  |  | ||||||
|  |  | ||||||
| def register_connection(alias, db=None, name=None, host=None, port=None, | def _get_connection_settings( | ||||||
|                         read_preference=READ_PREFERENCE, |         db=None, name=None, host=None, port=None, | ||||||
|                         username=None, password=None, |         read_preference=READ_PREFERENCE, | ||||||
|                         authentication_source=None, |         username=None, password=None, | ||||||
|                         authentication_mechanism=None, |         authentication_source=None, | ||||||
|                         **kwargs): |         authentication_mechanism=None, | ||||||
|     """Add a connection. |         **kwargs): | ||||||
|  |     """Get the connection settings as a dict | ||||||
|  |  | ||||||
|     :param alias: the name that will be used to refer to this connection |     : param db: the name of the database to use, for compatibility with connect | ||||||
|         throughout MongoEngine |     : param name: the name of the specific database to use | ||||||
|     :param name: the name of the specific database to use |     : param host: the host name of the: program: `mongod` instance to connect to | ||||||
|     :param db: the name of the database to use, for compatibility with connect |     : param port: the port that the: program: `mongod` instance is running on | ||||||
|     :param host: the host name of the :program:`mongod` instance to connect to |     : param read_preference: The read preference for the collection | ||||||
|     :param port: the port that the :program:`mongod` instance is running on |     : param username: username to authenticate with | ||||||
|     :param read_preference: The read preference for the collection |     : param password: password to authenticate with | ||||||
|        ** Added pymongo 2.1 |     : param authentication_source: database to authenticate against | ||||||
|     :param username: username to authenticate with |     : param authentication_mechanism: database authentication mechanisms. | ||||||
|     :param password: password to authenticate with |  | ||||||
|     :param authentication_source: database to authenticate against |  | ||||||
|     :param authentication_mechanism: database authentication mechanisms. |  | ||||||
|         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, |         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, | ||||||
|         MONGODB-CR (MongoDB Challenge Response protocol) for older servers. |         MONGODB-CR (MongoDB Challenge Response protocol) for older servers. | ||||||
|     :param is_mock: explicitly use mongomock for this connection |     : param is_mock: explicitly use mongomock for this connection | ||||||
|         (can also be done by using `mongomock://` as db host prefix) |         (can also be done by using `mongomock: // ` as db host prefix) | ||||||
|     :param kwargs: ad-hoc parameters to be passed into the pymongo driver, |     : param kwargs: ad-hoc parameters to be passed into the pymongo driver, | ||||||
|         for example maxpoolsize, tz_aware, etc. See the documentation |         for example maxpoolsize, tz_aware, etc. See the documentation | ||||||
|         for pymongo's `MongoClient` for a full list. |         for pymongo's `MongoClient` for a full list. | ||||||
|  |  | ||||||
|     .. versionchanged:: 0.10.6 - added mongomock support |     .. versionchanged:: 0.10.6 - added mongomock support | ||||||
|     """ |     """ | ||||||
|     conn_settings = { |     conn_settings = { | ||||||
|         'name': name or db or 'test', |         'name': name or db or DEFAULT_DATABASE_NAME, | ||||||
|         'host': host or 'localhost', |         'host': host or DEFAULT_HOST, | ||||||
|         'port': port or 27017, |         'port': port or DEFAULT_PORT, | ||||||
|         'read_preference': read_preference, |         'read_preference': read_preference, | ||||||
|         'username': username, |         'username': username, | ||||||
|         'password': password, |         'password': password, | ||||||
| @@ -69,6 +83,7 @@ def register_connection(alias, db=None, name=None, host=None, port=None, | |||||||
|         'authentication_mechanism': authentication_mechanism |         'authentication_mechanism': authentication_mechanism | ||||||
|     } |     } | ||||||
|  |  | ||||||
|  |     _check_db_name(conn_settings['name']) | ||||||
|     conn_host = conn_settings['host'] |     conn_host = conn_settings['host'] | ||||||
|  |  | ||||||
|     # Host can be a list or a string, so if string, force to a list. |     # Host can be a list or a string, so if string, force to a list. | ||||||
| @@ -104,6 +119,30 @@ def register_connection(alias, db=None, name=None, host=None, port=None, | |||||||
|                 conn_settings['authentication_source'] = uri_options['authsource'] |                 conn_settings['authentication_source'] = uri_options['authsource'] | ||||||
|             if 'authmechanism' in uri_options: |             if 'authmechanism' in uri_options: | ||||||
|                 conn_settings['authentication_mechanism'] = uri_options['authmechanism'] |                 conn_settings['authentication_mechanism'] = uri_options['authmechanism'] | ||||||
|  |             if 'readpreference' in uri_options: | ||||||
|  |                 read_preferences = ( | ||||||
|  |                     ReadPreference.NEAREST, | ||||||
|  |                     ReadPreference.PRIMARY, | ||||||
|  |                     ReadPreference.PRIMARY_PREFERRED, | ||||||
|  |                     ReadPreference.SECONDARY, | ||||||
|  |                     ReadPreference.SECONDARY_PREFERRED, | ||||||
|  |                 ) | ||||||
|  |  | ||||||
|  |                 # Starting with PyMongo v3.5, the "readpreference" option is | ||||||
|  |                 # returned as a string (e.g. "secondaryPreferred") and not an | ||||||
|  |                 # int (e.g. 3). | ||||||
|  |                 # TODO simplify the code below once we drop support for | ||||||
|  |                 # PyMongo v3.4. | ||||||
|  |                 read_pf_mode = uri_options['readpreference'] | ||||||
|  |                 if isinstance(read_pf_mode, six.string_types): | ||||||
|  |                     read_pf_mode = read_pf_mode.lower() | ||||||
|  |                 for preference in read_preferences: | ||||||
|  |                     if ( | ||||||
|  |                         preference.name.lower() == read_pf_mode or | ||||||
|  |                         preference.mode == read_pf_mode | ||||||
|  |                     ): | ||||||
|  |                         conn_settings['read_preference'] = preference | ||||||
|  |                         break | ||||||
|         else: |         else: | ||||||
|             resolved_hosts.append(entity) |             resolved_hosts.append(entity) | ||||||
|     conn_settings['host'] = resolved_hosts |     conn_settings['host'] = resolved_hosts | ||||||
| @@ -113,17 +152,74 @@ def register_connection(alias, db=None, name=None, host=None, port=None, | |||||||
|     kwargs.pop('is_slave', None) |     kwargs.pop('is_slave', None) | ||||||
|  |  | ||||||
|     conn_settings.update(kwargs) |     conn_settings.update(kwargs) | ||||||
|  |     return conn_settings | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def register_connection(alias, db=None, name=None, host=None, port=None, | ||||||
|  |                         read_preference=READ_PREFERENCE, | ||||||
|  |                         username=None, password=None, | ||||||
|  |                         authentication_source=None, | ||||||
|  |                         authentication_mechanism=None, | ||||||
|  |                         **kwargs): | ||||||
|  |     """Register the connection settings. | ||||||
|  |  | ||||||
|  |     : param alias: the name that will be used to refer to this connection | ||||||
|  |         throughout MongoEngine | ||||||
|  |     : param name: the name of the specific database to use | ||||||
|  |     : param db: the name of the database to use, for compatibility with connect | ||||||
|  |     : param host: the host name of the: program: `mongod` instance to connect to | ||||||
|  |     : param port: the port that the: program: `mongod` instance is running on | ||||||
|  |     : param read_preference: The read preference for the collection | ||||||
|  |     : param username: username to authenticate with | ||||||
|  |     : param password: password to authenticate with | ||||||
|  |     : param authentication_source: database to authenticate against | ||||||
|  |     : param authentication_mechanism: database authentication mechanisms. | ||||||
|  |         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, | ||||||
|  |         MONGODB-CR (MongoDB Challenge Response protocol) for older servers. | ||||||
|  |     : param is_mock: explicitly use mongomock for this connection | ||||||
|  |         (can also be done by using `mongomock: // ` as db host prefix) | ||||||
|  |     : param kwargs: ad-hoc parameters to be passed into the pymongo driver, | ||||||
|  |         for example maxpoolsize, tz_aware, etc. See the documentation | ||||||
|  |         for pymongo's `MongoClient` for a full list. | ||||||
|  |  | ||||||
|  |     .. versionchanged:: 0.10.6 - added mongomock support | ||||||
|  |     """ | ||||||
|  |     conn_settings = _get_connection_settings( | ||||||
|  |         db=db, name=name, host=host, port=port, | ||||||
|  |         read_preference=read_preference, | ||||||
|  |         username=username, password=password, | ||||||
|  |         authentication_source=authentication_source, | ||||||
|  |         authentication_mechanism=authentication_mechanism, | ||||||
|  |         **kwargs) | ||||||
|     _connection_settings[alias] = conn_settings |     _connection_settings[alias] = conn_settings | ||||||
|  |  | ||||||
|  |  | ||||||
| def disconnect(alias=DEFAULT_CONNECTION_NAME): | def disconnect(alias=DEFAULT_CONNECTION_NAME): | ||||||
|     """Close the connection with a given alias.""" |     """Close the connection with a given alias.""" | ||||||
|  |     from mongoengine.base.common import _get_documents_by_db | ||||||
|  |     from mongoengine import Document | ||||||
|  |  | ||||||
|     if alias in _connections: |     if alias in _connections: | ||||||
|         get_connection(alias=alias).close() |         get_connection(alias=alias).close() | ||||||
|         del _connections[alias] |         del _connections[alias] | ||||||
|  |  | ||||||
|     if alias in _dbs: |     if alias in _dbs: | ||||||
|  |         # Detach all cached collections in Documents | ||||||
|  |         for doc_cls in _get_documents_by_db(alias, DEFAULT_CONNECTION_NAME): | ||||||
|  |             if issubclass(doc_cls, Document):     # Skip EmbeddedDocument | ||||||
|  |                 doc_cls._disconnect() | ||||||
|  |  | ||||||
|         del _dbs[alias] |         del _dbs[alias] | ||||||
|  |  | ||||||
|  |     if alias in _connection_settings: | ||||||
|  |         del _connection_settings[alias] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def disconnect_all(): | ||||||
|  |     """Close all registered database.""" | ||||||
|  |     for alias in list(_connections.keys()): | ||||||
|  |         disconnect(alias) | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||||
|     """Return a connection with a given alias.""" |     """Return a connection with a given alias.""" | ||||||
| @@ -147,7 +243,6 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | |||||||
|         raise MongoEngineConnectionError(msg) |         raise MongoEngineConnectionError(msg) | ||||||
|  |  | ||||||
|     def _clean_settings(settings_dict): |     def _clean_settings(settings_dict): | ||||||
|         # set literal more efficient than calling set function |  | ||||||
|         irrelevant_fields_set = { |         irrelevant_fields_set = { | ||||||
|             'name', 'username', 'password', |             'name', 'username', 'password', | ||||||
|             'authentication_source', 'authentication_mechanism' |             'authentication_source', 'authentication_mechanism' | ||||||
| @@ -157,10 +252,12 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | |||||||
|             if k not in irrelevant_fields_set |             if k not in irrelevant_fields_set | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |     raw_conn_settings = _connection_settings[alias].copy() | ||||||
|  |  | ||||||
|     # Retrieve a copy of the connection settings associated with the requested |     # Retrieve a copy of the connection settings associated with the requested | ||||||
|     # alias and remove the database name and authentication info (we don't |     # alias and remove the database name and authentication info (we don't | ||||||
|     # care about them at this point). |     # care about them at this point). | ||||||
|     conn_settings = _clean_settings(_connection_settings[alias].copy()) |     conn_settings = _clean_settings(raw_conn_settings) | ||||||
|  |  | ||||||
|     # Determine if we should use PyMongo's or mongomock's MongoClient. |     # Determine if we should use PyMongo's or mongomock's MongoClient. | ||||||
|     is_mock = conn_settings.pop('is_mock', False) |     is_mock = conn_settings.pop('is_mock', False) | ||||||
| @@ -174,49 +271,58 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | |||||||
|     else: |     else: | ||||||
|         connection_class = MongoClient |         connection_class = MongoClient | ||||||
|  |  | ||||||
|         # For replica set connections with PyMongo 2.x, use |     # Re-use existing connection if one is suitable. | ||||||
|         # MongoReplicaSetClient. |     existing_connection = _find_existing_connection(raw_conn_settings) | ||||||
|         # TODO remove this once we stop supporting PyMongo 2.x. |     if existing_connection: | ||||||
|         if 'replicaSet' in conn_settings and not IS_PYMONGO_3: |         connection = existing_connection | ||||||
|             connection_class = MongoReplicaSetClient |     else: | ||||||
|             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) |         connection = _create_connection( | ||||||
|  |             alias=alias, | ||||||
|  |             connection_class=connection_class, | ||||||
|  |             **conn_settings | ||||||
|  |         ) | ||||||
|  |     _connections[alias] = connection | ||||||
|  |     return _connections[alias] | ||||||
|  |  | ||||||
|             # hosts_or_uri has to be a string, so if 'host' was provided |  | ||||||
|             # as a list, join its parts and separate them by ',' |  | ||||||
|             if isinstance(conn_settings['hosts_or_uri'], list): |  | ||||||
|                 conn_settings['hosts_or_uri'] = ','.join( |  | ||||||
|                     conn_settings['hosts_or_uri']) |  | ||||||
|  |  | ||||||
|             # Discard port since it can't be used on MongoReplicaSetClient | def _create_connection(alias, connection_class, **connection_settings): | ||||||
|             conn_settings.pop('port', None) |     """ | ||||||
|  |     Create the new connection for this alias. Raise | ||||||
|  |     MongoEngineConnectionError if it can't be established. | ||||||
|  |     """ | ||||||
|  |     try: | ||||||
|  |         return connection_class(**connection_settings) | ||||||
|  |     except Exception as e: | ||||||
|  |         raise MongoEngineConnectionError( | ||||||
|  |             'Cannot connect to database %s :\n%s' % (alias, e)) | ||||||
|  |  | ||||||
|     # Iterate over all of the connection settings and if a connection with |  | ||||||
|     # the same parameters is already established, use it instead of creating | def _find_existing_connection(connection_settings): | ||||||
|     # a new one. |     """ | ||||||
|     existing_connection = None |     Check if an existing connection could be reused | ||||||
|     connection_settings_iterator = ( |  | ||||||
|  |     Iterate over all of the connection settings and if an existing connection | ||||||
|  |     with the same parameters is suitable, return it | ||||||
|  |  | ||||||
|  |     :param connection_settings: the settings of the new connection | ||||||
|  |     :return: An existing connection or None | ||||||
|  |     """ | ||||||
|  |     connection_settings_bis = ( | ||||||
|         (db_alias, settings.copy()) |         (db_alias, settings.copy()) | ||||||
|         for db_alias, settings in _connection_settings.items() |         for db_alias, settings in _connection_settings.items() | ||||||
|     ) |     ) | ||||||
|     for db_alias, connection_settings in connection_settings_iterator: |  | ||||||
|         connection_settings = _clean_settings(connection_settings) |  | ||||||
|         if conn_settings == connection_settings and _connections.get(db_alias): |  | ||||||
|             existing_connection = _connections[db_alias] |  | ||||||
|             break |  | ||||||
|  |  | ||||||
|     # If an existing connection was found, assign it to the new alias |     def _clean_settings(settings_dict): | ||||||
|     if existing_connection: |         # Only remove the name but it's important to | ||||||
|         _connections[alias] = existing_connection |         # keep the username/password/authentication_source/authentication_mechanism | ||||||
|     else: |         # to identify if the connection could be shared (cfr https://github.com/MongoEngine/mongoengine/issues/2047) | ||||||
|         # Otherwise, create the new connection for this alias. Raise |         return {k: v for k, v in settings_dict.items() if k != 'name'} | ||||||
|         # MongoEngineConnectionError if it can't be established. |  | ||||||
|         try: |  | ||||||
|             _connections[alias] = connection_class(**conn_settings) |  | ||||||
|         except Exception as e: |  | ||||||
|             raise MongoEngineConnectionError( |  | ||||||
|                 'Cannot connect to database %s :\n%s' % (alias, e)) |  | ||||||
|  |  | ||||||
|     return _connections[alias] |     cleaned_conn_settings = _clean_settings(connection_settings) | ||||||
|  |     for db_alias, connection_settings in connection_settings_bis: | ||||||
|  |         db_conn_settings = _clean_settings(connection_settings) | ||||||
|  |         if cleaned_conn_settings == db_conn_settings and _connections.get(db_alias): | ||||||
|  |             return _connections[db_alias] | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||||
| @@ -246,14 +352,27 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): | |||||||
|     provide username and password arguments as well. |     provide username and password arguments as well. | ||||||
|  |  | ||||||
|     Multiple databases are supported by using aliases. Provide a separate |     Multiple databases are supported by using aliases. Provide a separate | ||||||
|     `alias` to connect to a different instance of :program:`mongod`. |     `alias` to connect to a different instance of: program: `mongod`. | ||||||
|  |  | ||||||
|  |     In order to replace a connection identified by a given alias, you'll | ||||||
|  |     need to call ``disconnect`` first | ||||||
|  |  | ||||||
|     See the docstring for `register_connection` for more details about all |     See the docstring for `register_connection` for more details about all | ||||||
|     supported kwargs. |     supported kwargs. | ||||||
|  |  | ||||||
|     .. versionchanged:: 0.6 - added multiple database support. |     .. versionchanged:: 0.6 - added multiple database support. | ||||||
|     """ |     """ | ||||||
|     if alias not in _connections: |     if alias in _connections: | ||||||
|  |         prev_conn_setting = _connection_settings[alias] | ||||||
|  |         new_conn_settings = _get_connection_settings(db, **kwargs) | ||||||
|  |  | ||||||
|  |         if new_conn_settings != prev_conn_setting: | ||||||
|  |             err_msg = ( | ||||||
|  |                 u'A different connection with alias `{}` was already ' | ||||||
|  |                 u'registered. Use disconnect() first' | ||||||
|  |             ).format(alias) | ||||||
|  |             raise MongoEngineConnectionError(err_msg) | ||||||
|  |     else: | ||||||
|         register_connection(alias, db, **kwargs) |         register_connection(alias, db, **kwargs) | ||||||
|  |  | ||||||
|     return get_connection(alias) |     return get_connection(alias) | ||||||
|   | |||||||
| @@ -1,8 +1,11 @@ | |||||||
| from contextlib import contextmanager | from contextlib import contextmanager | ||||||
|  |  | ||||||
| from pymongo.write_concern import WriteConcern | from pymongo.write_concern import WriteConcern | ||||||
|  | from six import iteritems | ||||||
|  |  | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||||
|  | from mongoengine.pymongo_support import count_documents | ||||||
|  |  | ||||||
| __all__ = ('switch_db', 'switch_collection', 'no_dereference', | __all__ = ('switch_db', 'switch_collection', 'no_dereference', | ||||||
|            'no_sub_classes', 'query_counter', 'set_write_concern') |            'no_sub_classes', 'query_counter', 'set_write_concern') | ||||||
| @@ -112,7 +115,7 @@ class no_dereference(object): | |||||||
|         GenericReferenceField = _import_class('GenericReferenceField') |         GenericReferenceField = _import_class('GenericReferenceField') | ||||||
|         ComplexBaseField = _import_class('ComplexBaseField') |         ComplexBaseField = _import_class('ComplexBaseField') | ||||||
|  |  | ||||||
|         self.deref_fields = [k for k, v in self.cls._fields.iteritems() |         self.deref_fields = [k for k, v in iteritems(self.cls._fields) | ||||||
|                              if isinstance(v, (ReferenceField, |                              if isinstance(v, (ReferenceField, | ||||||
|                                                GenericReferenceField, |                                                GenericReferenceField, | ||||||
|                                                ComplexBaseField))] |                                                ComplexBaseField))] | ||||||
| @@ -145,66 +148,85 @@ class no_sub_classes(object): | |||||||
|         :param cls: the class to turn querying sub classes on |         :param cls: the class to turn querying sub classes on | ||||||
|         """ |         """ | ||||||
|         self.cls = cls |         self.cls = cls | ||||||
|  |         self.cls_initial_subclasses = None | ||||||
|  |  | ||||||
|     def __enter__(self): |     def __enter__(self): | ||||||
|         """Change the objects default and _auto_dereference values.""" |         """Change the objects default and _auto_dereference values.""" | ||||||
|         self.cls._all_subclasses = self.cls._subclasses |         self.cls_initial_subclasses = self.cls._subclasses | ||||||
|         self.cls._subclasses = (self.cls,) |         self.cls._subclasses = (self.cls._class_name,) | ||||||
|         return self.cls |         return self.cls | ||||||
|  |  | ||||||
|     def __exit__(self, t, value, traceback): |     def __exit__(self, t, value, traceback): | ||||||
|         """Reset the default and _auto_dereference values.""" |         """Reset the default and _auto_dereference values.""" | ||||||
|         self.cls._subclasses = self.cls._all_subclasses |         self.cls._subclasses = self.cls_initial_subclasses | ||||||
|         delattr(self.cls, '_all_subclasses') |  | ||||||
|         return self.cls |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class query_counter(object): | class query_counter(object): | ||||||
|     """Query_counter context manager to get the number of queries.""" |     """Query_counter context manager to get the number of queries. | ||||||
|  |     This works by updating the `profiling_level` of the database so that all queries get logged, | ||||||
|  |     resetting the db.system.profile collection at the beginnig of the context and counting the new entries. | ||||||
|  |  | ||||||
|  |     This was designed for debugging purpose. In fact it is a global counter so queries issued by other threads/processes | ||||||
|  |     can interfere with it | ||||||
|  |  | ||||||
|  |     Be aware that: | ||||||
|  |     - Iterating over large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of | ||||||
|  |         documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches) | ||||||
|  |     - Some queries are ignored by default by the counter (killcursors, db.system.indexes) | ||||||
|  |     """ | ||||||
|  |  | ||||||
|     def __init__(self): |     def __init__(self): | ||||||
|         """Construct the query_counter.""" |         """Construct the query_counter | ||||||
|         self.counter = 0 |         """ | ||||||
|         self.db = get_db() |         self.db = get_db() | ||||||
|  |         self.initial_profiling_level = None | ||||||
|  |         self._ctx_query_counter = 0             # number of queries issued by the context | ||||||
|  |  | ||||||
|     def __enter__(self): |         self._ignored_query = { | ||||||
|         """On every with block we need to drop the profile collection.""" |             'ns': | ||||||
|  |                 {'$ne': '%s.system.indexes' % self.db.name}, | ||||||
|  |             'op':                       # MONGODB < 3.2 | ||||||
|  |                 {'$ne': 'killcursors'}, | ||||||
|  |             'command.killCursors':      # MONGODB >= 3.2 | ||||||
|  |                 {'$exists': False} | ||||||
|  |         } | ||||||
|  |  | ||||||
|  |     def _turn_on_profiling(self): | ||||||
|  |         self.initial_profiling_level = self.db.profiling_level() | ||||||
|         self.db.set_profiling_level(0) |         self.db.set_profiling_level(0) | ||||||
|         self.db.system.profile.drop() |         self.db.system.profile.drop() | ||||||
|         self.db.set_profiling_level(2) |         self.db.set_profiling_level(2) | ||||||
|  |  | ||||||
|  |     def _resets_profiling(self): | ||||||
|  |         self.db.set_profiling_level(self.initial_profiling_level) | ||||||
|  |  | ||||||
|  |     def __enter__(self): | ||||||
|  |         self._turn_on_profiling() | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def __exit__(self, t, value, traceback): |     def __exit__(self, t, value, traceback): | ||||||
|         """Reset the profiling level.""" |         self._resets_profiling() | ||||||
|         self.db.set_profiling_level(0) |  | ||||||
|  |  | ||||||
|     def __eq__(self, value): |     def __eq__(self, value): | ||||||
|         """== Compare querycounter.""" |  | ||||||
|         counter = self._get_count() |         counter = self._get_count() | ||||||
|         return value == counter |         return value == counter | ||||||
|  |  | ||||||
|     def __ne__(self, value): |     def __ne__(self, value): | ||||||
|         """!= Compare querycounter.""" |  | ||||||
|         return not self.__eq__(value) |         return not self.__eq__(value) | ||||||
|  |  | ||||||
|     def __lt__(self, value): |     def __lt__(self, value): | ||||||
|         """< Compare querycounter.""" |  | ||||||
|         return self._get_count() < value |         return self._get_count() < value | ||||||
|  |  | ||||||
|     def __le__(self, value): |     def __le__(self, value): | ||||||
|         """<= Compare querycounter.""" |  | ||||||
|         return self._get_count() <= value |         return self._get_count() <= value | ||||||
|  |  | ||||||
|     def __gt__(self, value): |     def __gt__(self, value): | ||||||
|         """> Compare querycounter.""" |  | ||||||
|         return self._get_count() > value |         return self._get_count() > value | ||||||
|  |  | ||||||
|     def __ge__(self, value): |     def __ge__(self, value): | ||||||
|         """>= Compare querycounter.""" |  | ||||||
|         return self._get_count() >= value |         return self._get_count() >= value | ||||||
|  |  | ||||||
|     def __int__(self): |     def __int__(self): | ||||||
|         """int representation.""" |  | ||||||
|         return self._get_count() |         return self._get_count() | ||||||
|  |  | ||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
| @@ -212,10 +234,12 @@ class query_counter(object): | |||||||
|         return u"%s" % self._get_count() |         return u"%s" % self._get_count() | ||||||
|  |  | ||||||
|     def _get_count(self): |     def _get_count(self): | ||||||
|         """Get the number of queries.""" |         """Get the number of queries by counting the current number of entries in db.system.profile | ||||||
|         ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}} |         and substracting the queries issued by this context. In fact everytime this is called, 1 query is | ||||||
|         count = self.db.system.profile.find(ignore_query).count() - self.counter |         issued so we need to balance that | ||||||
|         self.counter += 1 |         """ | ||||||
|  |         count = count_documents(self.db.system.profile, self._ignored_query) - self._ctx_query_counter | ||||||
|  |         self._ctx_query_counter += 1    # Account for the query we just issued to gather the information | ||||||
|         return count |         return count | ||||||
|  |  | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,5 +1,6 @@ | |||||||
| from bson import DBRef, SON | from bson import DBRef, SON | ||||||
| import six | import six | ||||||
|  | from six import iteritems | ||||||
|  |  | ||||||
| from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList, | from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList, | ||||||
|                               TopLevelDocumentMetaclass, get_document) |                               TopLevelDocumentMetaclass, get_document) | ||||||
| @@ -52,26 +53,40 @@ class DeReference(object): | |||||||
|                         [i.__class__ == doc_type for i in items.values()]): |                         [i.__class__ == doc_type for i in items.values()]): | ||||||
|                     return items |                     return items | ||||||
|                 elif not field.dbref: |                 elif not field.dbref: | ||||||
|  |                     # We must turn the ObjectIds into DBRefs | ||||||
|  |  | ||||||
|  |                     # Recursively dig into the sub items of a list/dict | ||||||
|  |                     # to turn the ObjectIds into DBRefs | ||||||
|  |                     def _get_items_from_list(items): | ||||||
|  |                         new_items = [] | ||||||
|  |                         for v in items: | ||||||
|  |                             value = v | ||||||
|  |                             if isinstance(v, dict): | ||||||
|  |                                 value = _get_items_from_dict(v) | ||||||
|  |                             elif isinstance(v, list): | ||||||
|  |                                 value = _get_items_from_list(v) | ||||||
|  |                             elif not isinstance(v, (DBRef, Document)): | ||||||
|  |                                 value = field.to_python(v) | ||||||
|  |                             new_items.append(value) | ||||||
|  |                         return new_items | ||||||
|  |  | ||||||
|  |                     def _get_items_from_dict(items): | ||||||
|  |                         new_items = {} | ||||||
|  |                         for k, v in iteritems(items): | ||||||
|  |                             value = v | ||||||
|  |                             if isinstance(v, list): | ||||||
|  |                                 value = _get_items_from_list(v) | ||||||
|  |                             elif isinstance(v, dict): | ||||||
|  |                                 value = _get_items_from_dict(v) | ||||||
|  |                             elif not isinstance(v, (DBRef, Document)): | ||||||
|  |                                 value = field.to_python(v) | ||||||
|  |                             new_items[k] = value | ||||||
|  |                         return new_items | ||||||
|  |  | ||||||
|                     if not hasattr(items, 'items'): |                     if not hasattr(items, 'items'): | ||||||
|  |                         items = _get_items_from_list(items) | ||||||
|                         def _get_items(items): |  | ||||||
|                             new_items = [] |  | ||||||
|                             for v in items: |  | ||||||
|                                 if isinstance(v, list): |  | ||||||
|                                     new_items.append(_get_items(v)) |  | ||||||
|                                 elif not isinstance(v, (DBRef, Document)): |  | ||||||
|                                     new_items.append(field.to_python(v)) |  | ||||||
|                                 else: |  | ||||||
|                                     new_items.append(v) |  | ||||||
|                             return new_items |  | ||||||
|  |  | ||||||
|                         items = _get_items(items) |  | ||||||
|                     else: |                     else: | ||||||
|                         items = { |                         items = _get_items_from_dict(items) | ||||||
|                             k: (v if isinstance(v, (DBRef, Document)) |  | ||||||
|                                 else field.to_python(v)) |  | ||||||
|                             for k, v in items.iteritems() |  | ||||||
|                         } |  | ||||||
|  |  | ||||||
|         self.reference_map = self._find_references(items) |         self.reference_map = self._find_references(items) | ||||||
|         self.object_map = self._fetch_objects(doc_type=doc_type) |         self.object_map = self._fetch_objects(doc_type=doc_type) | ||||||
| @@ -98,7 +113,7 @@ class DeReference(object): | |||||||
|         depth += 1 |         depth += 1 | ||||||
|         for item in iterator: |         for item in iterator: | ||||||
|             if isinstance(item, (Document, EmbeddedDocument)): |             if isinstance(item, (Document, EmbeddedDocument)): | ||||||
|                 for field_name, field in item._fields.iteritems(): |                 for field_name, field in iteritems(item._fields): | ||||||
|                     v = item._data.get(field_name, None) |                     v = item._data.get(field_name, None) | ||||||
|                     if isinstance(v, LazyReference): |                     if isinstance(v, LazyReference): | ||||||
|                         # LazyReference inherits DBRef but should not be dereferenced here ! |                         # LazyReference inherits DBRef but should not be dereferenced here ! | ||||||
| @@ -110,7 +125,7 @@ class DeReference(object): | |||||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: |                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None) |                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None) | ||||||
|                         references = self._find_references(v, depth) |                         references = self._find_references(v, depth) | ||||||
|                         for key, refs in references.iteritems(): |                         for key, refs in iteritems(references): | ||||||
|                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): |                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): | ||||||
|                                 key = field_cls |                                 key = field_cls | ||||||
|                             reference_map.setdefault(key, set()).update(refs) |                             reference_map.setdefault(key, set()).update(refs) | ||||||
| @@ -123,7 +138,7 @@ class DeReference(object): | |||||||
|                 reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id) |                 reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id) | ||||||
|             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: |             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: | ||||||
|                 references = self._find_references(item, depth - 1) |                 references = self._find_references(item, depth - 1) | ||||||
|                 for key, refs in references.iteritems(): |                 for key, refs in iteritems(references): | ||||||
|                     reference_map.setdefault(key, set()).update(refs) |                     reference_map.setdefault(key, set()).update(refs) | ||||||
|  |  | ||||||
|         return reference_map |         return reference_map | ||||||
| @@ -132,16 +147,21 @@ class DeReference(object): | |||||||
|         """Fetch all references and convert to their document objects |         """Fetch all references and convert to their document objects | ||||||
|         """ |         """ | ||||||
|         object_map = {} |         object_map = {} | ||||||
|         for collection, dbrefs in self.reference_map.iteritems(): |         for collection, dbrefs in iteritems(self.reference_map): | ||||||
|             if hasattr(collection, 'objects'):  # We have a document class for the refs |  | ||||||
|  |             # we use getattr instead of hasattr because hasattr swallows any exception under python2 | ||||||
|  |             # so it could hide nasty things without raising exceptions (cfr bug #1688)) | ||||||
|  |             ref_document_cls_exists = (getattr(collection, 'objects', None) is not None) | ||||||
|  |  | ||||||
|  |             if ref_document_cls_exists: | ||||||
|                 col_name = collection._get_collection_name() |                 col_name = collection._get_collection_name() | ||||||
|                 refs = [dbref for dbref in dbrefs |                 refs = [dbref for dbref in dbrefs | ||||||
|                         if (col_name, dbref) not in object_map] |                         if (col_name, dbref) not in object_map] | ||||||
|                 references = collection.objects.in_bulk(refs) |                 references = collection.objects.in_bulk(refs) | ||||||
|                 for key, doc in references.iteritems(): |                 for key, doc in iteritems(references): | ||||||
|                     object_map[(col_name, key)] = doc |                     object_map[(col_name, key)] = doc | ||||||
|             else:  # Generic reference: use the refs data to convert to document |             else:  # Generic reference: use the refs data to convert to document | ||||||
|                 if isinstance(doc_type, (ListField, DictField, MapField,)): |                 if isinstance(doc_type, (ListField, DictField, MapField)): | ||||||
|                     continue |                     continue | ||||||
|  |  | ||||||
|                 refs = [dbref for dbref in dbrefs |                 refs = [dbref for dbref in dbrefs | ||||||
| @@ -210,7 +230,7 @@ class DeReference(object): | |||||||
|             data = [] |             data = [] | ||||||
|         else: |         else: | ||||||
|             is_list = False |             is_list = False | ||||||
|             iterator = items.iteritems() |             iterator = iteritems(items) | ||||||
|             data = {} |             data = {} | ||||||
|  |  | ||||||
|         depth += 1 |         depth += 1 | ||||||
|   | |||||||
| @@ -5,6 +5,7 @@ from bson.dbref import DBRef | |||||||
| import pymongo | import pymongo | ||||||
| from pymongo.read_preferences import ReadPreference | from pymongo.read_preferences import ReadPreference | ||||||
| import six | import six | ||||||
|  | from six import iteritems | ||||||
|  |  | ||||||
| from mongoengine import signals | from mongoengine import signals | ||||||
| from mongoengine.base import (BaseDict, BaseDocument, BaseList, | from mongoengine.base import (BaseDict, BaseDocument, BaseList, | ||||||
| @@ -12,10 +13,12 @@ from mongoengine.base import (BaseDict, BaseDocument, BaseList, | |||||||
|                               TopLevelDocumentMetaclass, get_document) |                               TopLevelDocumentMetaclass, get_document) | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||||
| from mongoengine.context_managers import switch_collection, switch_db | from mongoengine.context_managers import (set_write_concern, | ||||||
|  |                                           switch_collection, | ||||||
|  |                                           switch_db) | ||||||
| from mongoengine.errors import (InvalidDocumentError, InvalidQueryError, | from mongoengine.errors import (InvalidDocumentError, InvalidQueryError, | ||||||
|                                 SaveConditionError) |                                 SaveConditionError) | ||||||
| from mongoengine.python_support import IS_PYMONGO_3 | from mongoengine.pymongo_support import list_collection_names | ||||||
| from mongoengine.queryset import (NotUniqueError, OperationError, | from mongoengine.queryset import (NotUniqueError, OperationError, | ||||||
|                                   QuerySet, transform) |                                   QuerySet, transform) | ||||||
|  |  | ||||||
| @@ -39,7 +42,7 @@ class InvalidCollectionError(Exception): | |||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| class EmbeddedDocument(BaseDocument): | class EmbeddedDocument(six.with_metaclass(DocumentMetaclass, BaseDocument)): | ||||||
|     """A :class:`~mongoengine.Document` that isn't stored in its own |     """A :class:`~mongoengine.Document` that isn't stored in its own | ||||||
|     collection.  :class:`~mongoengine.EmbeddedDocument`\ s should be used as |     collection.  :class:`~mongoengine.EmbeddedDocument`\ s should be used as | ||||||
|     fields on :class:`~mongoengine.Document`\ s through the |     fields on :class:`~mongoengine.Document`\ s through the | ||||||
| @@ -58,7 +61,6 @@ class EmbeddedDocument(BaseDocument): | |||||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|     my_metaclass = DocumentMetaclass |     my_metaclass = DocumentMetaclass | ||||||
|     __metaclass__ = DocumentMetaclass |  | ||||||
|  |  | ||||||
|     # A generic embedded document doesn't have any immutable properties |     # A generic embedded document doesn't have any immutable properties | ||||||
|     # that describe it uniquely, hence it shouldn't be hashable. You can |     # that describe it uniquely, hence it shouldn't be hashable. You can | ||||||
| @@ -88,14 +90,8 @@ class EmbeddedDocument(BaseDocument): | |||||||
|  |  | ||||||
|         return data |         return data | ||||||
|  |  | ||||||
|     def save(self, *args, **kwargs): |  | ||||||
|         self._instance.save(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def reload(self, *args, **kwargs): | class Document(six.with_metaclass(TopLevelDocumentMetaclass, BaseDocument)): | ||||||
|         self._instance.reload(*args, **kwargs) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Document(BaseDocument): |  | ||||||
|     """The base class used for defining the structure and properties of |     """The base class used for defining the structure and properties of | ||||||
|     collections of documents stored in MongoDB. Inherit from this class, and |     collections of documents stored in MongoDB. Inherit from this class, and | ||||||
|     add fields as class attributes to define a document's structure. |     add fields as class attributes to define a document's structure. | ||||||
| @@ -150,7 +146,6 @@ class Document(BaseDocument): | |||||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|     my_metaclass = TopLevelDocumentMetaclass |     my_metaclass = TopLevelDocumentMetaclass | ||||||
|     __metaclass__ = TopLevelDocumentMetaclass |  | ||||||
|  |  | ||||||
|     __slots__ = ('__objects',) |     __slots__ = ('__objects',) | ||||||
|  |  | ||||||
| @@ -172,8 +167,8 @@ class Document(BaseDocument): | |||||||
|         """ |         """ | ||||||
|         if self.pk is None: |         if self.pk is None: | ||||||
|             return super(BaseDocument, self).__hash__() |             return super(BaseDocument, self).__hash__() | ||||||
|         else: |  | ||||||
|             return hash(self.pk) |         return hash(self.pk) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _get_db(cls): |     def _get_db(cls): | ||||||
| @@ -181,10 +176,21 @@ class Document(BaseDocument): | |||||||
|         return get_db(cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)) |         return get_db(cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME)) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _get_collection(cls): |     def _disconnect(cls): | ||||||
|         """Return a PyMongo collection for the document.""" |         """Detach the Document class from the (cached) database collection""" | ||||||
|         if not hasattr(cls, '_collection') or cls._collection is None: |         cls._collection = None | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def _get_collection(cls): | ||||||
|  |         """Return the PyMongo collection corresponding to this document. | ||||||
|  |  | ||||||
|  |         Upon first call, this method: | ||||||
|  |         1. Initializes a :class:`~pymongo.collection.Collection` corresponding | ||||||
|  |            to this document. | ||||||
|  |         2. Creates indexes defined in this document's :attr:`meta` dictionary. | ||||||
|  |            This happens only if `auto_create_index` is True. | ||||||
|  |         """ | ||||||
|  |         if not hasattr(cls, '_collection') or cls._collection is None: | ||||||
|             # Get the collection, either capped or regular. |             # Get the collection, either capped or regular. | ||||||
|             if cls._meta.get('max_size') or cls._meta.get('max_documents'): |             if cls._meta.get('max_size') or cls._meta.get('max_documents'): | ||||||
|                 cls._collection = cls._get_capped_collection() |                 cls._collection = cls._get_capped_collection() | ||||||
| @@ -221,7 +227,7 @@ class Document(BaseDocument): | |||||||
|  |  | ||||||
|         # If the collection already exists and has different options |         # If the collection already exists and has different options | ||||||
|         # (i.e. isn't capped or has different max/size), raise an error. |         # (i.e. isn't capped or has different max/size), raise an error. | ||||||
|         if collection_name in db.collection_names(): |         if collection_name in list_collection_names(db, include_system_collections=True): | ||||||
|             collection = db[collection_name] |             collection = db[collection_name] | ||||||
|             options = collection.options() |             options = collection.options() | ||||||
|             if ( |             if ( | ||||||
| @@ -246,7 +252,7 @@ class Document(BaseDocument): | |||||||
|         data = super(Document, self).to_mongo(*args, **kwargs) |         data = super(Document, self).to_mongo(*args, **kwargs) | ||||||
|  |  | ||||||
|         # If '_id' is None, try and set it from self._data. If that |         # If '_id' is None, try and set it from self._data. If that | ||||||
|         # doesn't exist either, remote '_id' from the SON completely. |         # doesn't exist either, remove '_id' from the SON completely. | ||||||
|         if data['_id'] is None: |         if data['_id'] is None: | ||||||
|             if self._data.get('id') is None: |             if self._data.get('id') is None: | ||||||
|                 del data['_id'] |                 del data['_id'] | ||||||
| @@ -352,24 +358,26 @@ class Document(BaseDocument): | |||||||
|         .. versionchanged:: 0.10.7 |         .. versionchanged:: 0.10.7 | ||||||
|             Add signal_kwargs argument |             Add signal_kwargs argument | ||||||
|         """ |         """ | ||||||
|  |         signal_kwargs = signal_kwargs or {} | ||||||
|  |  | ||||||
|         if self._meta.get('abstract'): |         if self._meta.get('abstract'): | ||||||
|             raise InvalidDocumentError('Cannot save an abstract document.') |             raise InvalidDocumentError('Cannot save an abstract document.') | ||||||
|  |  | ||||||
|         signal_kwargs = signal_kwargs or {} |  | ||||||
|         signals.pre_save.send(self.__class__, document=self, **signal_kwargs) |         signals.pre_save.send(self.__class__, document=self, **signal_kwargs) | ||||||
|  |  | ||||||
|         if validate: |         if validate: | ||||||
|             self.validate(clean=clean) |             self.validate(clean=clean) | ||||||
|  |  | ||||||
|         if write_concern is None: |         if write_concern is None: | ||||||
|             write_concern = {'w': 1} |             write_concern = {} | ||||||
|  |  | ||||||
|         doc = self.to_mongo() |         doc_id = self.to_mongo(fields=[self._meta['id_field']]) | ||||||
|  |         created = ('_id' not in doc_id or self._created or force_insert) | ||||||
|         created = ('_id' not in doc or self._created or force_insert) |  | ||||||
|  |  | ||||||
|         signals.pre_save_post_validation.send(self.__class__, document=self, |         signals.pre_save_post_validation.send(self.__class__, document=self, | ||||||
|                                               created=created, **signal_kwargs) |                                               created=created, **signal_kwargs) | ||||||
|  |         # it might be refreshed by the pre_save_post_validation hook, e.g., for etag generation | ||||||
|  |         doc = self.to_mongo() | ||||||
|  |  | ||||||
|         if self._meta.get('auto_create_index', True): |         if self._meta.get('auto_create_index', True): | ||||||
|             self.ensure_indexes() |             self.ensure_indexes() | ||||||
| @@ -429,21 +437,18 @@ class Document(BaseDocument): | |||||||
|         Helper method, should only be used inside save(). |         Helper method, should only be used inside save(). | ||||||
|         """ |         """ | ||||||
|         collection = self._get_collection() |         collection = self._get_collection() | ||||||
|  |         with set_write_concern(collection, write_concern) as wc_collection: | ||||||
|  |             if force_insert: | ||||||
|  |                 return wc_collection.insert_one(doc).inserted_id | ||||||
|  |             # insert_one will provoke UniqueError alongside save does not | ||||||
|  |             # therefore, it need to catch and call replace_one. | ||||||
|  |             if '_id' in doc: | ||||||
|  |                 raw_object = wc_collection.find_one_and_replace( | ||||||
|  |                     {'_id': doc['_id']}, doc) | ||||||
|  |                 if raw_object: | ||||||
|  |                     return doc['_id'] | ||||||
|  |  | ||||||
|         if force_insert: |             object_id = wc_collection.insert_one(doc).inserted_id | ||||||
|             return collection.insert(doc, **write_concern) |  | ||||||
|  |  | ||||||
|         object_id = collection.save(doc, **write_concern) |  | ||||||
|  |  | ||||||
|         # In PyMongo 3.0, the save() call calls internally the _update() call |  | ||||||
|         # but they forget to return the _id value passed back, therefore getting it back here |  | ||||||
|         # Correct behaviour in 2.X and in 3.0.1+ versions |  | ||||||
|         if not object_id and pymongo.version_tuple == (3, 0): |  | ||||||
|             pk_as_mongo_obj = self._fields.get(self._meta['id_field']).to_mongo(self.pk) |  | ||||||
|             object_id = ( |  | ||||||
|                 self._qs.filter(pk=pk_as_mongo_obj).first() and |  | ||||||
|                 self._qs.filter(pk=pk_as_mongo_obj).first().pk |  | ||||||
|             )  # TODO doesn't this make 2 queries? |  | ||||||
|  |  | ||||||
|         return object_id |         return object_id | ||||||
|  |  | ||||||
| @@ -490,8 +495,12 @@ class Document(BaseDocument): | |||||||
|         update_doc = self._get_update_doc() |         update_doc = self._get_update_doc() | ||||||
|         if update_doc: |         if update_doc: | ||||||
|             upsert = save_condition is None |             upsert = save_condition is None | ||||||
|             last_error = collection.update(select_dict, update_doc, |             with set_write_concern(collection, write_concern) as wc_collection: | ||||||
|                                            upsert=upsert, **write_concern) |                 last_error = wc_collection.update_one( | ||||||
|  |                     select_dict, | ||||||
|  |                     update_doc, | ||||||
|  |                     upsert=upsert | ||||||
|  |                 ).raw_result | ||||||
|             if not upsert and last_error['n'] == 0: |             if not upsert and last_error['n'] == 0: | ||||||
|                 raise SaveConditionError('Race condition preventing' |                 raise SaveConditionError('Race condition preventing' | ||||||
|                                          ' document update detected') |                                          ' document update detected') | ||||||
| @@ -585,9 +594,8 @@ class Document(BaseDocument): | |||||||
|         :param signal_kwargs: (optional) kwargs dictionary to be passed to |         :param signal_kwargs: (optional) kwargs dictionary to be passed to | ||||||
|             the signal calls. |             the signal calls. | ||||||
|         :param write_concern: Extra keyword arguments are passed down which |         :param write_concern: Extra keyword arguments are passed down which | ||||||
|             will be used as options for the resultant |             will be used as options for the resultant ``getLastError`` command. | ||||||
|             ``getLastError`` command.  For example, |             For example, ``save(..., w: 2, fsync: True)`` will | ||||||
|             ``save(..., write_concern={w: 2, fsync: True}, ...)`` will |  | ||||||
|             wait until at least two servers have recorded the write and |             wait until at least two servers have recorded the write and | ||||||
|             will force an fsync on the primary server. |             will force an fsync on the primary server. | ||||||
|  |  | ||||||
| @@ -599,7 +607,7 @@ class Document(BaseDocument): | |||||||
|  |  | ||||||
|         # Delete FileFields separately |         # Delete FileFields separately | ||||||
|         FileField = _import_class('FileField') |         FileField = _import_class('FileField') | ||||||
|         for name, field in self._fields.iteritems(): |         for name, field in iteritems(self._fields): | ||||||
|             if isinstance(field, FileField): |             if isinstance(field, FileField): | ||||||
|                 getattr(self, name).delete() |                 getattr(self, name).delete() | ||||||
|  |  | ||||||
| @@ -784,13 +792,13 @@ class Document(BaseDocument): | |||||||
|         .. versionchanged:: 0.10.7 |         .. versionchanged:: 0.10.7 | ||||||
|             :class:`OperationError` exception raised if no collection available |             :class:`OperationError` exception raised if no collection available | ||||||
|         """ |         """ | ||||||
|         col_name = cls._get_collection_name() |         coll_name = cls._get_collection_name() | ||||||
|         if not col_name: |         if not coll_name: | ||||||
|             raise OperationError('Document %s has no collection defined ' |             raise OperationError('Document %s has no collection defined ' | ||||||
|                                  '(is it abstract ?)' % cls) |                                  '(is it abstract ?)' % cls) | ||||||
|         cls._collection = None |         cls._collection = None | ||||||
|         db = cls._get_db() |         db = cls._get_db() | ||||||
|         db.drop_collection(col_name) |         db.drop_collection(coll_name) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def create_index(cls, keys, background=False, **kwargs): |     def create_index(cls, keys, background=False, **kwargs): | ||||||
| @@ -805,18 +813,13 @@ class Document(BaseDocument): | |||||||
|         index_spec = index_spec.copy() |         index_spec = index_spec.copy() | ||||||
|         fields = index_spec.pop('fields') |         fields = index_spec.pop('fields') | ||||||
|         drop_dups = kwargs.get('drop_dups', False) |         drop_dups = kwargs.get('drop_dups', False) | ||||||
|         if IS_PYMONGO_3 and drop_dups: |         if drop_dups: | ||||||
|             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' |             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' | ||||||
|             warnings.warn(msg, DeprecationWarning) |             warnings.warn(msg, DeprecationWarning) | ||||||
|         elif not IS_PYMONGO_3: |  | ||||||
|             index_spec['drop_dups'] = drop_dups |  | ||||||
|         index_spec['background'] = background |         index_spec['background'] = background | ||||||
|         index_spec.update(kwargs) |         index_spec.update(kwargs) | ||||||
|  |  | ||||||
|         if IS_PYMONGO_3: |         return cls._get_collection().create_index(fields, **index_spec) | ||||||
|             return cls._get_collection().create_index(fields, **index_spec) |  | ||||||
|         else: |  | ||||||
|             return cls._get_collection().ensure_index(fields, **index_spec) |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def ensure_index(cls, key_or_list, drop_dups=False, background=False, |     def ensure_index(cls, key_or_list, drop_dups=False, background=False, | ||||||
| @@ -831,11 +834,9 @@ class Document(BaseDocument): | |||||||
|         :param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value |         :param drop_dups: Was removed/ignored with MongoDB >2.7.5. The value | ||||||
|             will be removed if PyMongo3+ is used |             will be removed if PyMongo3+ is used | ||||||
|         """ |         """ | ||||||
|         if IS_PYMONGO_3 and drop_dups: |         if drop_dups: | ||||||
|             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' |             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' | ||||||
|             warnings.warn(msg, DeprecationWarning) |             warnings.warn(msg, DeprecationWarning) | ||||||
|         elif not IS_PYMONGO_3: |  | ||||||
|             kwargs.update({'drop_dups': drop_dups}) |  | ||||||
|         return cls.create_index(key_or_list, background=background, **kwargs) |         return cls.create_index(key_or_list, background=background, **kwargs) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
| @@ -851,7 +852,7 @@ class Document(BaseDocument): | |||||||
|         drop_dups = cls._meta.get('index_drop_dups', False) |         drop_dups = cls._meta.get('index_drop_dups', False) | ||||||
|         index_opts = cls._meta.get('index_opts') or {} |         index_opts = cls._meta.get('index_opts') or {} | ||||||
|         index_cls = cls._meta.get('index_cls', True) |         index_cls = cls._meta.get('index_cls', True) | ||||||
|         if IS_PYMONGO_3 and drop_dups: |         if drop_dups: | ||||||
|             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' |             msg = 'drop_dups is deprecated and is removed when using PyMongo 3+.' | ||||||
|             warnings.warn(msg, DeprecationWarning) |             warnings.warn(msg, DeprecationWarning) | ||||||
|  |  | ||||||
| @@ -882,11 +883,7 @@ class Document(BaseDocument): | |||||||
|                 if 'cls' in opts: |                 if 'cls' in opts: | ||||||
|                     del opts['cls'] |                     del opts['cls'] | ||||||
|  |  | ||||||
|                 if IS_PYMONGO_3: |                 collection.create_index(fields, background=background, **opts) | ||||||
|                     collection.create_index(fields, background=background, **opts) |  | ||||||
|                 else: |  | ||||||
|                     collection.ensure_index(fields, background=background, |  | ||||||
|                                             drop_dups=drop_dups, **opts) |  | ||||||
|  |  | ||||||
|         # If _cls is being used (for polymorphism), it needs an index, |         # If _cls is being used (for polymorphism), it needs an index, | ||||||
|         # only if another index doesn't begin with _cls |         # only if another index doesn't begin with _cls | ||||||
| @@ -897,12 +894,8 @@ class Document(BaseDocument): | |||||||
|             if 'cls' in index_opts: |             if 'cls' in index_opts: | ||||||
|                 del index_opts['cls'] |                 del index_opts['cls'] | ||||||
|  |  | ||||||
|             if IS_PYMONGO_3: |             collection.create_index('_cls', background=background, | ||||||
|                 collection.create_index('_cls', background=background, |                                     **index_opts) | ||||||
|                                         **index_opts) |  | ||||||
|             else: |  | ||||||
|                 collection.ensure_index('_cls', background=background, |  | ||||||
|                                         **index_opts) |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def list_indexes(cls): |     def list_indexes(cls): | ||||||
| @@ -997,10 +990,10 @@ class Document(BaseDocument): | |||||||
|         return {'missing': missing, 'extra': extra} |         return {'missing': missing, 'extra': extra} | ||||||
|  |  | ||||||
|  |  | ||||||
| class DynamicDocument(Document): | class DynamicDocument(six.with_metaclass(TopLevelDocumentMetaclass, Document)): | ||||||
|     """A Dynamic Document class allowing flexible, expandable and uncontrolled |     """A Dynamic Document class allowing flexible, expandable and uncontrolled | ||||||
|     schemas.  As a :class:`~mongoengine.Document` subclass, acts in the same |     schemas.  As a :class:`~mongoengine.Document` subclass, acts in the same | ||||||
|     way as an ordinary document but has expando style properties.  Any data |     way as an ordinary document but has expanded style properties.  Any data | ||||||
|     passed or set against the :class:`~mongoengine.DynamicDocument` that is |     passed or set against the :class:`~mongoengine.DynamicDocument` that is | ||||||
|     not a field is automatically converted into a |     not a field is automatically converted into a | ||||||
|     :class:`~mongoengine.fields.DynamicField` and data can be attributed to that |     :class:`~mongoengine.fields.DynamicField` and data can be attributed to that | ||||||
| @@ -1008,13 +1001,12 @@ class DynamicDocument(Document): | |||||||
|  |  | ||||||
|     .. note:: |     .. note:: | ||||||
|  |  | ||||||
|         There is one caveat on Dynamic Documents: fields cannot start with `_` |         There is one caveat on Dynamic Documents: undeclared fields cannot start with `_` | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|     my_metaclass = TopLevelDocumentMetaclass |     my_metaclass = TopLevelDocumentMetaclass | ||||||
|     __metaclass__ = TopLevelDocumentMetaclass |  | ||||||
|  |  | ||||||
|     _dynamic = True |     _dynamic = True | ||||||
|  |  | ||||||
| @@ -1030,7 +1022,7 @@ class DynamicDocument(Document): | |||||||
|             super(DynamicDocument, self).__delattr__(*args, **kwargs) |             super(DynamicDocument, self).__delattr__(*args, **kwargs) | ||||||
|  |  | ||||||
|  |  | ||||||
| class DynamicEmbeddedDocument(EmbeddedDocument): | class DynamicEmbeddedDocument(six.with_metaclass(DocumentMetaclass, EmbeddedDocument)): | ||||||
|     """A Dynamic Embedded Document class allowing flexible, expandable and |     """A Dynamic Embedded Document class allowing flexible, expandable and | ||||||
|     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more |     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more | ||||||
|     information about dynamic documents. |     information about dynamic documents. | ||||||
| @@ -1039,7 +1031,6 @@ class DynamicEmbeddedDocument(EmbeddedDocument): | |||||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|     my_metaclass = DocumentMetaclass |     my_metaclass = DocumentMetaclass | ||||||
|     __metaclass__ = DocumentMetaclass |  | ||||||
|  |  | ||||||
|     _dynamic = True |     _dynamic = True | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,11 +1,12 @@ | |||||||
| from collections import defaultdict | from collections import defaultdict | ||||||
|  |  | ||||||
| import six | import six | ||||||
|  | from six import iteritems | ||||||
|  |  | ||||||
| __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', | __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', | ||||||
|            'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', |            'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', | ||||||
|            'OperationError', 'NotUniqueError', 'FieldDoesNotExist', |            'OperationError', 'NotUniqueError', 'FieldDoesNotExist', | ||||||
|            'ValidationError', 'SaveConditionError') |            'ValidationError', 'SaveConditionError', 'DeprecatedError') | ||||||
|  |  | ||||||
|  |  | ||||||
| class NotRegistered(Exception): | class NotRegistered(Exception): | ||||||
| @@ -71,6 +72,7 @@ class ValidationError(AssertionError): | |||||||
|     _message = None |     _message = None | ||||||
|  |  | ||||||
|     def __init__(self, message='', **kwargs): |     def __init__(self, message='', **kwargs): | ||||||
|  |         super(ValidationError, self).__init__(message) | ||||||
|         self.errors = kwargs.get('errors', {}) |         self.errors = kwargs.get('errors', {}) | ||||||
|         self.field_name = kwargs.get('field_name') |         self.field_name = kwargs.get('field_name') | ||||||
|         self.message = message |         self.message = message | ||||||
| @@ -108,11 +110,8 @@ class ValidationError(AssertionError): | |||||||
|  |  | ||||||
|         def build_dict(source): |         def build_dict(source): | ||||||
|             errors_dict = {} |             errors_dict = {} | ||||||
|             if not source: |  | ||||||
|                 return errors_dict |  | ||||||
|  |  | ||||||
|             if isinstance(source, dict): |             if isinstance(source, dict): | ||||||
|                 for field_name, error in source.iteritems(): |                 for field_name, error in iteritems(source): | ||||||
|                     errors_dict[field_name] = build_dict(error) |                     errors_dict[field_name] = build_dict(error) | ||||||
|             elif isinstance(source, ValidationError) and source.errors: |             elif isinstance(source, ValidationError) and source.errors: | ||||||
|                 return build_dict(source.errors) |                 return build_dict(source.errors) | ||||||
| @@ -134,12 +133,17 @@ class ValidationError(AssertionError): | |||||||
|                 value = ' '.join([generate_key(k) for k in value]) |                 value = ' '.join([generate_key(k) for k in value]) | ||||||
|             elif isinstance(value, dict): |             elif isinstance(value, dict): | ||||||
|                 value = ' '.join( |                 value = ' '.join( | ||||||
|                     [generate_key(v, k) for k, v in value.iteritems()]) |                     [generate_key(v, k) for k, v in iteritems(value)]) | ||||||
|  |  | ||||||
|             results = '%s.%s' % (prefix, value) if prefix else value |             results = '%s.%s' % (prefix, value) if prefix else value | ||||||
|             return results |             return results | ||||||
|  |  | ||||||
|         error_dict = defaultdict(list) |         error_dict = defaultdict(list) | ||||||
|         for k, v in self.to_dict().iteritems(): |         for k, v in iteritems(self.to_dict()): | ||||||
|             error_dict[generate_key(v)].append(k) |             error_dict[generate_key(v)].append(k) | ||||||
|         return ' '.join(['%s: %s' % (k, v) for k, v in error_dict.iteritems()]) |         return ' '.join(['%s: %s' % (k, v) for k, v in iteritems(error_dict)]) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DeprecatedError(Exception): | ||||||
|  |     """Raise when a user uses a feature that has been Deprecated""" | ||||||
|  |     pass | ||||||
|   | |||||||
| @@ -5,13 +5,13 @@ import re | |||||||
| import socket | import socket | ||||||
| import time | import time | ||||||
| import uuid | import uuid | ||||||
| import warnings |  | ||||||
| from operator import itemgetter | from operator import itemgetter | ||||||
|  |  | ||||||
| from bson import Binary, DBRef, ObjectId, SON | from bson import Binary, DBRef, ObjectId, SON | ||||||
| import gridfs | import gridfs | ||||||
| import pymongo | import pymongo | ||||||
| import six | import six | ||||||
|  | from six import iteritems | ||||||
|  |  | ||||||
| try: | try: | ||||||
|     import dateutil |     import dateutil | ||||||
| @@ -25,15 +25,19 @@ try: | |||||||
| except ImportError: | except ImportError: | ||||||
|     Int64 = long |     Int64 = long | ||||||
|  |  | ||||||
|  |  | ||||||
| from mongoengine.base import (BaseDocument, BaseField, ComplexBaseField, | from mongoengine.base import (BaseDocument, BaseField, ComplexBaseField, | ||||||
|                               GeoJsonBaseField, LazyReference, ObjectIdField, |                               GeoJsonBaseField, LazyReference, ObjectIdField, | ||||||
|                               get_document) |                               get_document) | ||||||
|  | from mongoengine.base.utils import LazyRegexCompiler | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||||
| from mongoengine.document import Document, EmbeddedDocument | from mongoengine.document import Document, EmbeddedDocument | ||||||
| from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError | from mongoengine.errors import DoesNotExist, InvalidQueryError, ValidationError | ||||||
| from mongoengine.python_support import StringIO | from mongoengine.python_support import StringIO | ||||||
| from mongoengine.queryset import DO_NOTHING, QuerySet | from mongoengine.queryset import DO_NOTHING | ||||||
|  | from mongoengine.queryset.base import BaseQuerySet | ||||||
|  | from mongoengine.queryset.transform import STRING_OPERATORS | ||||||
|  |  | ||||||
| try: | try: | ||||||
|     from PIL import Image, ImageOps |     from PIL import Image, ImageOps | ||||||
| @@ -41,9 +45,15 @@ except ImportError: | |||||||
|     Image = None |     Image = None | ||||||
|     ImageOps = None |     ImageOps = None | ||||||
|  |  | ||||||
|  | if six.PY3: | ||||||
|  |     # Useless as long as 2to3 gets executed | ||||||
|  |     # as it turns `long` into `int` blindly | ||||||
|  |     long = int | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ( | __all__ = ( | ||||||
|     'StringField', 'URLField', 'EmailField', 'IntField', 'LongField', |     'StringField', 'URLField', 'EmailField', 'IntField', 'LongField', | ||||||
|     'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField', |     'FloatField', 'DecimalField', 'BooleanField', 'DateTimeField', 'DateField', | ||||||
|     'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField', |     'ComplexDateTimeField', 'EmbeddedDocumentField', 'ObjectIdField', | ||||||
|     'GenericEmbeddedDocumentField', 'DynamicField', 'ListField', |     'GenericEmbeddedDocumentField', 'DynamicField', 'ListField', | ||||||
|     'SortedListField', 'EmbeddedDocumentListField', 'DictField', |     'SortedListField', 'EmbeddedDocumentListField', 'DictField', | ||||||
| @@ -97,11 +107,11 @@ class StringField(BaseField): | |||||||
|         if not isinstance(op, six.string_types): |         if not isinstance(op, six.string_types): | ||||||
|             return value |             return value | ||||||
|  |  | ||||||
|         if op.lstrip('i') in ('startswith', 'endswith', 'contains', 'exact'): |         if op in STRING_OPERATORS: | ||||||
|             flags = 0 |             case_insensitive = op.startswith('i') | ||||||
|             if op.startswith('i'): |             op = op.lstrip('i') | ||||||
|                 flags = re.IGNORECASE |  | ||||||
|                 op = op.lstrip('i') |             flags = re.IGNORECASE if case_insensitive else 0 | ||||||
|  |  | ||||||
|             regex = r'%s' |             regex = r'%s' | ||||||
|             if op == 'startswith': |             if op == 'startswith': | ||||||
| @@ -123,9 +133,9 @@ class URLField(StringField): | |||||||
|     .. versionadded:: 0.3 |     .. versionadded:: 0.3 | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     _URL_REGEX = re.compile( |     _URL_REGEX = LazyRegexCompiler( | ||||||
|         r'^(?:[a-z0-9\.\-]*)://'  # scheme is validated separately |         r'^(?:[a-z0-9\.\-]*)://'  # scheme is validated separately | ||||||
|         r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}(?<!-)\.?)|'  # domain... |         r'(?:(?:[A-Z0-9](?:[A-Z0-9-_]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}(?<!-)\.?)|'  # domain... | ||||||
|         r'localhost|'  # localhost... |         r'localhost|'  # localhost... | ||||||
|         r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|'  # ...or ipv4 |         r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|'  # ...or ipv4 | ||||||
|         r'\[?[A-F0-9]*:[A-F0-9:]+\]?)'  # ...or ipv6 |         r'\[?[A-F0-9]*:[A-F0-9:]+\]?)'  # ...or ipv6 | ||||||
| @@ -133,8 +143,7 @@ class URLField(StringField): | |||||||
|         r'(?:/?|[/?]\S+)$', re.IGNORECASE) |         r'(?:/?|[/?]\S+)$', re.IGNORECASE) | ||||||
|     _URL_SCHEMES = ['http', 'https', 'ftp', 'ftps'] |     _URL_SCHEMES = ['http', 'https', 'ftp', 'ftps'] | ||||||
|  |  | ||||||
|     def __init__(self, verify_exists=False, url_regex=None, schemes=None, **kwargs): |     def __init__(self, url_regex=None, schemes=None, **kwargs): | ||||||
|         self.verify_exists = verify_exists |  | ||||||
|         self.url_regex = url_regex or self._URL_REGEX |         self.url_regex = url_regex or self._URL_REGEX | ||||||
|         self.schemes = schemes or self._URL_SCHEMES |         self.schemes = schemes or self._URL_SCHEMES | ||||||
|         super(URLField, self).__init__(**kwargs) |         super(URLField, self).__init__(**kwargs) | ||||||
| @@ -144,12 +153,10 @@ class URLField(StringField): | |||||||
|         scheme = value.split('://')[0].lower() |         scheme = value.split('://')[0].lower() | ||||||
|         if scheme not in self.schemes: |         if scheme not in self.schemes: | ||||||
|             self.error(u'Invalid scheme {} in URL: {}'.format(scheme, value)) |             self.error(u'Invalid scheme {} in URL: {}'.format(scheme, value)) | ||||||
|             return |  | ||||||
|  |  | ||||||
|         # Then check full URL |         # Then check full URL | ||||||
|         if not self.url_regex.match(value): |         if not self.url_regex.match(value): | ||||||
|             self.error(u'Invalid URL: {}'.format(value)) |             self.error(u'Invalid URL: {}'.format(value)) | ||||||
|             return |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class EmailField(StringField): | class EmailField(StringField): | ||||||
| @@ -157,7 +164,7 @@ class EmailField(StringField): | |||||||
|  |  | ||||||
|     .. versionadded:: 0.4 |     .. versionadded:: 0.4 | ||||||
|     """ |     """ | ||||||
|     USER_REGEX = re.compile( |     USER_REGEX = LazyRegexCompiler( | ||||||
|         # `dot-atom` defined in RFC 5322 Section 3.2.3. |         # `dot-atom` defined in RFC 5322 Section 3.2.3. | ||||||
|         r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z" |         r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z" | ||||||
|         # `quoted-string` defined in RFC 5322 Section 3.2.4. |         # `quoted-string` defined in RFC 5322 Section 3.2.4. | ||||||
| @@ -165,7 +172,7 @@ class EmailField(StringField): | |||||||
|         re.IGNORECASE |         re.IGNORECASE | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     UTF8_USER_REGEX = re.compile( |     UTF8_USER_REGEX = LazyRegexCompiler( | ||||||
|         six.u( |         six.u( | ||||||
|             # RFC 6531 Section 3.3 extends `atext` (used by dot-atom) to |             # RFC 6531 Section 3.3 extends `atext` (used by dot-atom) to | ||||||
|             # include `UTF8-non-ascii`. |             # include `UTF8-non-ascii`. | ||||||
| @@ -175,7 +182,7 @@ class EmailField(StringField): | |||||||
|         ), re.IGNORECASE | re.UNICODE |         ), re.IGNORECASE | re.UNICODE | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
|     DOMAIN_REGEX = re.compile( |     DOMAIN_REGEX = LazyRegexCompiler( | ||||||
|         r'((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+)(?:[A-Z0-9-]{2,63}(?<!-))\Z', |         r'((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+)(?:[A-Z0-9-]{2,63}(?<!-))\Z', | ||||||
|         re.IGNORECASE |         re.IGNORECASE | ||||||
|     ) |     ) | ||||||
| @@ -251,10 +258,10 @@ class EmailField(StringField): | |||||||
|             try: |             try: | ||||||
|                 domain_part = domain_part.encode('idna').decode('ascii') |                 domain_part = domain_part.encode('idna').decode('ascii') | ||||||
|             except UnicodeError: |             except UnicodeError: | ||||||
|                 self.error(self.error_msg % value) |                 self.error("%s %s" % (self.error_msg % value, "(domain failed IDN encoding)")) | ||||||
|             else: |             else: | ||||||
|                 if not self.validate_domain_part(domain_part): |                 if not self.validate_domain_part(domain_part): | ||||||
|                     self.error(self.error_msg % value) |                     self.error("%s %s" % (self.error_msg % value, "(domain validation failed)")) | ||||||
|  |  | ||||||
|  |  | ||||||
| class IntField(BaseField): | class IntField(BaseField): | ||||||
| @@ -267,14 +274,14 @@ class IntField(BaseField): | |||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         try: |         try: | ||||||
|             value = int(value) |             value = int(value) | ||||||
|         except ValueError: |         except (TypeError, ValueError): | ||||||
|             pass |             pass | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         try: |         try: | ||||||
|             value = int(value) |             value = int(value) | ||||||
|         except Exception: |         except (TypeError, ValueError): | ||||||
|             self.error('%s could not be converted to int' % value) |             self.error('%s could not be converted to int' % value) | ||||||
|  |  | ||||||
|         if self.min_value is not None and value < self.min_value: |         if self.min_value is not None and value < self.min_value: | ||||||
| @@ -300,7 +307,7 @@ class LongField(BaseField): | |||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         try: |         try: | ||||||
|             value = long(value) |             value = long(value) | ||||||
|         except ValueError: |         except (TypeError, ValueError): | ||||||
|             pass |             pass | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
| @@ -310,7 +317,7 @@ class LongField(BaseField): | |||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         try: |         try: | ||||||
|             value = long(value) |             value = long(value) | ||||||
|         except Exception: |         except (TypeError, ValueError): | ||||||
|             self.error('%s could not be converted to long' % value) |             self.error('%s could not be converted to long' % value) | ||||||
|  |  | ||||||
|         if self.min_value is not None and value < self.min_value: |         if self.min_value is not None and value < self.min_value: | ||||||
| @@ -364,7 +371,8 @@ class FloatField(BaseField): | |||||||
|  |  | ||||||
|  |  | ||||||
| class DecimalField(BaseField): | class DecimalField(BaseField): | ||||||
|     """Fixed-point decimal number field. |     """Fixed-point decimal number field. Stores the value as a float by default unless `force_string` is used. | ||||||
|  |     If using floats, beware of Decimal to float conversion (potential precision loss) | ||||||
|  |  | ||||||
|     .. versionchanged:: 0.8 |     .. versionchanged:: 0.8 | ||||||
|     .. versionadded:: 0.3 |     .. versionadded:: 0.3 | ||||||
| @@ -375,7 +383,9 @@ class DecimalField(BaseField): | |||||||
|         """ |         """ | ||||||
|         :param min_value: Validation rule for the minimum acceptable value. |         :param min_value: Validation rule for the minimum acceptable value. | ||||||
|         :param max_value: Validation rule for the maximum acceptable value. |         :param max_value: Validation rule for the maximum acceptable value. | ||||||
|         :param force_string: Store as a string. |         :param force_string: Store the value as a string (instead of a float). | ||||||
|  |          Be aware that this affects query sorting and operation like lte, gte (as string comparison is applied) | ||||||
|  |          and some query operator won't work (e.g: inc, dec) | ||||||
|         :param precision: Number of decimal places to store. |         :param precision: Number of decimal places to store. | ||||||
|         :param rounding: The rounding rule from the python decimal library: |         :param rounding: The rounding rule from the python decimal library: | ||||||
|  |  | ||||||
| @@ -406,7 +416,7 @@ class DecimalField(BaseField): | |||||||
|         # Convert to string for python 2.6 before casting to Decimal |         # Convert to string for python 2.6 before casting to Decimal | ||||||
|         try: |         try: | ||||||
|             value = decimal.Decimal('%s' % value) |             value = decimal.Decimal('%s' % value) | ||||||
|         except decimal.InvalidOperation: |         except (TypeError, ValueError, decimal.InvalidOperation): | ||||||
|             return value |             return value | ||||||
|         return value.quantize(decimal.Decimal('.%s' % ('0' * self.precision)), rounding=self.rounding) |         return value.quantize(decimal.Decimal('.%s' % ('0' * self.precision)), rounding=self.rounding) | ||||||
|  |  | ||||||
| @@ -423,7 +433,7 @@ class DecimalField(BaseField): | |||||||
|                 value = six.text_type(value) |                 value = six.text_type(value) | ||||||
|             try: |             try: | ||||||
|                 value = decimal.Decimal(value) |                 value = decimal.Decimal(value) | ||||||
|             except Exception as exc: |             except (TypeError, ValueError, decimal.InvalidOperation) as exc: | ||||||
|                 self.error('Could not convert value to decimal: %s' % exc) |                 self.error('Could not convert value to decimal: %s' % exc) | ||||||
|  |  | ||||||
|         if self.min_value is not None and value < self.min_value: |         if self.min_value is not None and value < self.min_value: | ||||||
| @@ -462,6 +472,8 @@ class DateTimeField(BaseField): | |||||||
|     installed you can utilise it to convert varying types of date formats into valid |     installed you can utilise it to convert varying types of date formats into valid | ||||||
|     python datetime objects. |     python datetime objects. | ||||||
|  |  | ||||||
|  |     Note: To default the field to the current datetime, use: DateTimeField(default=datetime.utcnow) | ||||||
|  |  | ||||||
|     Note: Microseconds are rounded to the nearest millisecond. |     Note: Microseconds are rounded to the nearest millisecond. | ||||||
|       Pre UTC microsecond support is effectively broken. |       Pre UTC microsecond support is effectively broken. | ||||||
|       Use :class:`~mongoengine.fields.ComplexDateTimeField` if you |       Use :class:`~mongoengine.fields.ComplexDateTimeField` if you | ||||||
| @@ -486,15 +498,18 @@ class DateTimeField(BaseField): | |||||||
|         if not isinstance(value, six.string_types): |         if not isinstance(value, six.string_types): | ||||||
|             return None |             return None | ||||||
|  |  | ||||||
|  |         return self._parse_datetime(value) | ||||||
|  |  | ||||||
|  |     def _parse_datetime(self, value): | ||||||
|  |         # Attempt to parse a datetime from a string | ||||||
|         value = value.strip() |         value = value.strip() | ||||||
|         if not value: |         if not value: | ||||||
|             return None |             return None | ||||||
|  |  | ||||||
|         # Attempt to parse a datetime: |  | ||||||
|         if dateutil: |         if dateutil: | ||||||
|             try: |             try: | ||||||
|                 return dateutil.parser.parse(value) |                 return dateutil.parser.parse(value) | ||||||
|             except (TypeError, ValueError): |             except (TypeError, ValueError, OverflowError): | ||||||
|                 return None |                 return None | ||||||
|  |  | ||||||
|         # split usecs, because they are not recognized by strptime. |         # split usecs, because they are not recognized by strptime. | ||||||
| @@ -525,6 +540,22 @@ class DateTimeField(BaseField): | |||||||
|         return super(DateTimeField, self).prepare_query_value(op, self.to_mongo(value)) |         return super(DateTimeField, self).prepare_query_value(op, self.to_mongo(value)) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DateField(DateTimeField): | ||||||
|  |     def to_mongo(self, value): | ||||||
|  |         value = super(DateField, self).to_mongo(value) | ||||||
|  |         # drop hours, minutes, seconds | ||||||
|  |         if isinstance(value, datetime.datetime): | ||||||
|  |             value = datetime.datetime(value.year, value.month, value.day) | ||||||
|  |         return value | ||||||
|  |  | ||||||
|  |     def to_python(self, value): | ||||||
|  |         value = super(DateField, self).to_python(value) | ||||||
|  |         # convert datetime to date | ||||||
|  |         if isinstance(value, datetime.datetime): | ||||||
|  |             value = datetime.date(value.year, value.month, value.day) | ||||||
|  |         return value | ||||||
|  |  | ||||||
|  |  | ||||||
| class ComplexDateTimeField(StringField): | class ComplexDateTimeField(StringField): | ||||||
|     """ |     """ | ||||||
|     ComplexDateTimeField handles microseconds exactly instead of rounding |     ComplexDateTimeField handles microseconds exactly instead of rounding | ||||||
| @@ -541,11 +572,15 @@ class ComplexDateTimeField(StringField): | |||||||
|     The `,` as the separator can be easily modified by passing the `separator` |     The `,` as the separator can be easily modified by passing the `separator` | ||||||
|     keyword when initializing the field. |     keyword when initializing the field. | ||||||
|  |  | ||||||
|  |     Note: To default the field to the current datetime, use: DateTimeField(default=datetime.utcnow) | ||||||
|  |  | ||||||
|     .. versionadded:: 0.5 |     .. versionadded:: 0.5 | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def __init__(self, separator=',', **kwargs): |     def __init__(self, separator=',', **kwargs): | ||||||
|         self.names = ['year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond'] |         """ | ||||||
|  |         :param separator: Allows to customize the separator used for storage (default ``,``) | ||||||
|  |         """ | ||||||
|         self.separator = separator |         self.separator = separator | ||||||
|         self.format = separator.join(['%Y', '%m', '%d', '%H', '%M', '%S', '%f']) |         self.format = separator.join(['%Y', '%m', '%d', '%H', '%M', '%S', '%f']) | ||||||
|         super(ComplexDateTimeField, self).__init__(**kwargs) |         super(ComplexDateTimeField, self).__init__(**kwargs) | ||||||
| @@ -572,20 +607,24 @@ class ComplexDateTimeField(StringField): | |||||||
|         >>> ComplexDateTimeField()._convert_from_string(a) |         >>> ComplexDateTimeField()._convert_from_string(a) | ||||||
|         datetime.datetime(2011, 6, 8, 20, 26, 24, 92284) |         datetime.datetime(2011, 6, 8, 20, 26, 24, 92284) | ||||||
|         """ |         """ | ||||||
|         values = map(int, data.split(self.separator)) |         values = [int(d) for d in data.split(self.separator)] | ||||||
|         return datetime.datetime(*values) |         return datetime.datetime(*values) | ||||||
|  |  | ||||||
|     def __get__(self, instance, owner): |     def __get__(self, instance, owner): | ||||||
|  |         if instance is None: | ||||||
|  |             return self | ||||||
|  |  | ||||||
|         data = super(ComplexDateTimeField, self).__get__(instance, owner) |         data = super(ComplexDateTimeField, self).__get__(instance, owner) | ||||||
|         if data is None: |  | ||||||
|             return None if self.null else datetime.datetime.now() |         if isinstance(data, datetime.datetime) or data is None: | ||||||
|         if isinstance(data, datetime.datetime): |  | ||||||
|             return data |             return data | ||||||
|         return self._convert_from_string(data) |         return self._convert_from_string(data) | ||||||
|  |  | ||||||
|     def __set__(self, instance, value): |     def __set__(self, instance, value): | ||||||
|         value = self._convert_from_datetime(value) if value else value |         super(ComplexDateTimeField, self).__set__(instance, value) | ||||||
|         return super(ComplexDateTimeField, self).__set__(instance, value) |         value = instance._data[self.name] | ||||||
|  |         if value is not None: | ||||||
|  |             instance._data[self.name] = self._convert_from_datetime(value) | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         value = self.to_python(value) |         value = self.to_python(value) | ||||||
| @@ -629,9 +668,17 @@ class EmbeddedDocumentField(BaseField): | |||||||
|     def document_type(self): |     def document_type(self): | ||||||
|         if isinstance(self.document_type_obj, six.string_types): |         if isinstance(self.document_type_obj, six.string_types): | ||||||
|             if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: |             if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT: | ||||||
|                 self.document_type_obj = self.owner_document |                 resolved_document_type = self.owner_document | ||||||
|             else: |             else: | ||||||
|                 self.document_type_obj = get_document(self.document_type_obj) |                 resolved_document_type = get_document(self.document_type_obj) | ||||||
|  |  | ||||||
|  |             if not issubclass(resolved_document_type, EmbeddedDocument): | ||||||
|  |                 # Due to the late resolution of the document_type | ||||||
|  |                 # There is a chance that it won't be an EmbeddedDocument (#1661) | ||||||
|  |                 self.error('Invalid embedded document class provided to an ' | ||||||
|  |                            'EmbeddedDocumentField') | ||||||
|  |             self.document_type_obj = resolved_document_type | ||||||
|  |  | ||||||
|         return self.document_type_obj |         return self.document_type_obj | ||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
| @@ -655,7 +702,11 @@ class EmbeddedDocumentField(BaseField): | |||||||
|         self.document_type.validate(value, clean) |         self.document_type.validate(value, clean) | ||||||
|  |  | ||||||
|     def lookup_member(self, member_name): |     def lookup_member(self, member_name): | ||||||
|         return self.document_type._fields.get(member_name) |         doc_and_subclasses = [self.document_type] + self.document_type.__subclasses__() | ||||||
|  |         for doc_type in doc_and_subclasses: | ||||||
|  |             field = doc_type._fields.get(member_name) | ||||||
|  |             if field: | ||||||
|  |                 return field | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
|         if value is not None and not isinstance(value, self.document_type): |         if value is not None and not isinstance(value, self.document_type): | ||||||
| @@ -702,12 +753,13 @@ class GenericEmbeddedDocumentField(BaseField): | |||||||
|         value.validate(clean=clean) |         value.validate(clean=clean) | ||||||
|  |  | ||||||
|     def lookup_member(self, member_name): |     def lookup_member(self, member_name): | ||||||
|         if self.choices: |         document_choices = self.choices or [] | ||||||
|             for choice in self.choices: |         for document_choice in document_choices: | ||||||
|                 field = choice._fields.get(member_name) |             doc_and_subclasses = [document_choice] + document_choice.__subclasses__() | ||||||
|  |             for doc_type in doc_and_subclasses: | ||||||
|  |                 field = doc_type._fields.get(member_name) | ||||||
|                 if field: |                 if field: | ||||||
|                     return field |                     return field | ||||||
|         return None |  | ||||||
|  |  | ||||||
|     def to_mongo(self, document, use_db_field=True, fields=None): |     def to_mongo(self, document, use_db_field=True, fields=None): | ||||||
|         if document is None: |         if document is None: | ||||||
| @@ -750,12 +802,12 @@ class DynamicField(BaseField): | |||||||
|             value = {k: v for k, v in enumerate(value)} |             value = {k: v for k, v in enumerate(value)} | ||||||
|  |  | ||||||
|         data = {} |         data = {} | ||||||
|         for k, v in value.iteritems(): |         for k, v in iteritems(value): | ||||||
|             data[k] = self.to_mongo(v, use_db_field, fields) |             data[k] = self.to_mongo(v, use_db_field, fields) | ||||||
|  |  | ||||||
|         value = data |         value = data | ||||||
|         if is_list:  # Convert back to a list |         if is_list:  # Convert back to a list | ||||||
|             value = [v for k, v in sorted(data.iteritems(), key=itemgetter(0))] |             value = [v for k, v in sorted(iteritems(data), key=itemgetter(0))] | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
| @@ -808,8 +860,7 @@ class ListField(ComplexBaseField): | |||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         """Make sure that a list of valid fields is being used.""" |         """Make sure that a list of valid fields is being used.""" | ||||||
|         if (not isinstance(value, (list, tuple, QuerySet)) or |         if not isinstance(value, (list, tuple, BaseQuerySet)): | ||||||
|                 isinstance(value, six.string_types)): |  | ||||||
|             self.error('Only lists and tuples may be used in a list field') |             self.error('Only lists and tuples may be used in a list field') | ||||||
|         super(ListField, self).validate(value) |         super(ListField, self).validate(value) | ||||||
|  |  | ||||||
| @@ -901,7 +952,7 @@ def key_has_dot_or_dollar(d): | |||||||
|     dictionary contains a dot or a dollar sign. |     dictionary contains a dot or a dollar sign. | ||||||
|     """ |     """ | ||||||
|     for k, v in d.items(): |     for k, v in d.items(): | ||||||
|         if ('.' in k or '$' in k) or (isinstance(v, dict) and key_has_dot_or_dollar(v)): |         if ('.' in k or k.startswith('$')) or (isinstance(v, dict) and key_has_dot_or_dollar(v)): | ||||||
|             return True |             return True | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -916,14 +967,9 @@ class DictField(ComplexBaseField): | |||||||
|     .. versionchanged:: 0.5 - Can now handle complex / varying types of data |     .. versionchanged:: 0.5 - Can now handle complex / varying types of data | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def __init__(self, basecls=None, field=None, *args, **kwargs): |     def __init__(self, field=None, *args, **kwargs): | ||||||
|         self.field = field |         self.field = field | ||||||
|         self._auto_dereference = False |         self._auto_dereference = False | ||||||
|         self.basecls = basecls or BaseField |  | ||||||
|  |  | ||||||
|         # XXX ValidationError raised outside of the "validate" method. |  | ||||||
|         if not issubclass(self.basecls, BaseField): |  | ||||||
|             self.error('DictField only accepts dict values') |  | ||||||
|  |  | ||||||
|         kwargs.setdefault('default', lambda: {}) |         kwargs.setdefault('default', lambda: {}) | ||||||
|         super(DictField, self).__init__(*args, **kwargs) |         super(DictField, self).__init__(*args, **kwargs) | ||||||
| @@ -939,11 +985,11 @@ class DictField(ComplexBaseField): | |||||||
|             self.error(msg) |             self.error(msg) | ||||||
|         if key_has_dot_or_dollar(value): |         if key_has_dot_or_dollar(value): | ||||||
|             self.error('Invalid dictionary key name - keys may not contain "."' |             self.error('Invalid dictionary key name - keys may not contain "."' | ||||||
|                        ' or "$" characters') |                        ' or startswith "$" characters') | ||||||
|         super(DictField, self).validate(value) |         super(DictField, self).validate(value) | ||||||
|  |  | ||||||
|     def lookup_member(self, member_name): |     def lookup_member(self, member_name): | ||||||
|         return DictField(basecls=self.basecls, db_field=member_name) |         return DictField(db_field=member_name) | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
|         match_operators = ['contains', 'icontains', 'startswith', |         match_operators = ['contains', 'icontains', 'startswith', | ||||||
| @@ -953,7 +999,7 @@ class DictField(ComplexBaseField): | |||||||
|         if op in match_operators and isinstance(value, six.string_types): |         if op in match_operators and isinstance(value, six.string_types): | ||||||
|             return StringField().prepare_query_value(op, value) |             return StringField().prepare_query_value(op, value) | ||||||
|  |  | ||||||
|         if hasattr(self.field, 'field'): |         if hasattr(self.field, 'field'):    # Used for instance when using DictField(ListField(IntField())) | ||||||
|             if op in ('set', 'unset') and isinstance(value, dict): |             if op in ('set', 'unset') and isinstance(value, dict): | ||||||
|                 return { |                 return { | ||||||
|                     k: self.field.prepare_query_value(op, v) |                     k: self.field.prepare_query_value(op, v) | ||||||
| @@ -1011,11 +1057,13 @@ class ReferenceField(BaseField): | |||||||
|  |  | ||||||
|     .. code-block:: python |     .. code-block:: python | ||||||
|  |  | ||||||
|         class Bar(Document): |         class Org(Document): | ||||||
|             content = StringField() |             owner = ReferenceField('User') | ||||||
|             foo = ReferenceField('Foo') |  | ||||||
|  |  | ||||||
|         Foo.register_delete_rule(Bar, 'foo', NULLIFY) |         class User(Document): | ||||||
|  |             org = ReferenceField('Org', reverse_delete_rule=CASCADE) | ||||||
|  |  | ||||||
|  |         User.register_delete_rule(Org, 'owner', DENY) | ||||||
|  |  | ||||||
|     .. versionchanged:: 0.5 added `reverse_delete_rule` |     .. versionchanged:: 0.5 added `reverse_delete_rule` | ||||||
|     """ |     """ | ||||||
| @@ -1063,9 +1111,9 @@ class ReferenceField(BaseField): | |||||||
|  |  | ||||||
|         # Get value from document instance if available |         # Get value from document instance if available | ||||||
|         value = instance._data.get(self.name) |         value = instance._data.get(self.name) | ||||||
|         self._auto_dereference = instance._fields[self.name]._auto_dereference |         auto_dereference = instance._fields[self.name]._auto_dereference | ||||||
|         # Dereference DBRefs |         # Dereference DBRefs | ||||||
|         if self._auto_dereference and isinstance(value, DBRef): |         if auto_dereference and isinstance(value, DBRef): | ||||||
|             if hasattr(value, 'cls'): |             if hasattr(value, 'cls'): | ||||||
|                 # Dereference using the class type specified in the reference |                 # Dereference using the class type specified in the reference | ||||||
|                 cls = get_document(value.cls) |                 cls = get_document(value.cls) | ||||||
| @@ -1136,16 +1184,6 @@ class ReferenceField(BaseField): | |||||||
|             self.error('You can only reference documents once they have been ' |             self.error('You can only reference documents once they have been ' | ||||||
|                        'saved to the database') |                        'saved to the database') | ||||||
|  |  | ||||||
|         if ( |  | ||||||
|             self.document_type._meta.get('abstract') and |  | ||||||
|             not isinstance(value, self.document_type) |  | ||||||
|         ): |  | ||||||
|             self.error( |  | ||||||
|                 '%s is not an instance of abstract reference type %s' % ( |  | ||||||
|                     self.document_type._class_name |  | ||||||
|                 ) |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|     def lookup_member(self, member_name): |     def lookup_member(self, member_name): | ||||||
|         return self.document_type._fields.get(member_name) |         return self.document_type._fields.get(member_name) | ||||||
|  |  | ||||||
| @@ -1226,9 +1264,10 @@ class CachedReferenceField(BaseField): | |||||||
|  |  | ||||||
|         # Get value from document instance if available |         # Get value from document instance if available | ||||||
|         value = instance._data.get(self.name) |         value = instance._data.get(self.name) | ||||||
|         self._auto_dereference = instance._fields[self.name]._auto_dereference |         auto_dereference = instance._fields[self.name]._auto_dereference | ||||||
|  |  | ||||||
|         # Dereference DBRefs |         # Dereference DBRefs | ||||||
|         if self._auto_dereference and isinstance(value, DBRef): |         if auto_dereference and isinstance(value, DBRef): | ||||||
|             dereferenced = self.document_type._get_db().dereference(value) |             dereferenced = self.document_type._get_db().dereference(value) | ||||||
|             if dereferenced is None: |             if dereferenced is None: | ||||||
|                 raise DoesNotExist('Trying to dereference unknown document %s' % value) |                 raise DoesNotExist('Trying to dereference unknown document %s' % value) | ||||||
| @@ -1361,8 +1400,8 @@ class GenericReferenceField(BaseField): | |||||||
|  |  | ||||||
|         value = instance._data.get(self.name) |         value = instance._data.get(self.name) | ||||||
|  |  | ||||||
|         self._auto_dereference = instance._fields[self.name]._auto_dereference |         auto_dereference = instance._fields[self.name]._auto_dereference | ||||||
|         if self._auto_dereference and isinstance(value, (dict, SON)): |         if auto_dereference and isinstance(value, (dict, SON)): | ||||||
|             dereferenced = self.dereference(value) |             dereferenced = self.dereference(value) | ||||||
|             if dereferenced is None: |             if dereferenced is None: | ||||||
|                 raise DoesNotExist('Trying to dereference unknown document %s' % value) |                 raise DoesNotExist('Trying to dereference unknown document %s' % value) | ||||||
| @@ -1444,14 +1483,20 @@ class BinaryField(BaseField): | |||||||
|         return Binary(value) |         return Binary(value) | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         if not isinstance(value, (six.binary_type, six.text_type, Binary)): |         if not isinstance(value, (six.binary_type, Binary)): | ||||||
|             self.error('BinaryField only accepts instances of ' |             self.error('BinaryField only accepts instances of ' | ||||||
|                        '(%s, %s, Binary)' % ( |                        '(%s, %s, Binary)' % ( | ||||||
|                            six.binary_type.__name__, six.text_type.__name__)) |                            six.binary_type.__name__, Binary.__name__)) | ||||||
|  |  | ||||||
|         if self.max_bytes is not None and len(value) > self.max_bytes: |         if self.max_bytes is not None and len(value) > self.max_bytes: | ||||||
|             self.error('Binary value is too long') |             self.error('Binary value is too long') | ||||||
|  |  | ||||||
|  |     def prepare_query_value(self, op, value): | ||||||
|  |         if value is None: | ||||||
|  |             return value | ||||||
|  |         return super(BinaryField, self).prepare_query_value( | ||||||
|  |             op, self.to_mongo(value)) | ||||||
|  |  | ||||||
|  |  | ||||||
| class GridFSError(Exception): | class GridFSError(Exception): | ||||||
|     pass |     pass | ||||||
| @@ -1492,9 +1537,11 @@ class GridFSProxy(object): | |||||||
|     def __get__(self, instance, value): |     def __get__(self, instance, value): | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def __nonzero__(self): |     def __bool__(self): | ||||||
|         return bool(self.grid_id) |         return bool(self.grid_id) | ||||||
|  |  | ||||||
|  |     __nonzero__ = __bool__  # For Py2 support | ||||||
|  |  | ||||||
|     def __getstate__(self): |     def __getstate__(self): | ||||||
|         self_dict = self.__dict__ |         self_dict = self.__dict__ | ||||||
|         self_dict['_fs'] = None |         self_dict['_fs'] = None | ||||||
| @@ -1512,9 +1559,9 @@ class GridFSProxy(object): | |||||||
|         return '<%s: %s>' % (self.__class__.__name__, self.grid_id) |         return '<%s: %s>' % (self.__class__.__name__, self.grid_id) | ||||||
|  |  | ||||||
|     def __str__(self): |     def __str__(self): | ||||||
|         name = getattr( |         gridout = self.get() | ||||||
|             self.get(), 'filename', self.grid_id) if self.get() else '(no file)' |         filename = getattr(gridout, 'filename') if gridout else '<no file>' | ||||||
|         return '<%s: %s>' % (self.__class__.__name__, name) |         return '<%s: %s (%s)>' % (self.__class__.__name__, filename, self.grid_id) | ||||||
|  |  | ||||||
|     def __eq__(self, other): |     def __eq__(self, other): | ||||||
|         if isinstance(other, GridFSProxy): |         if isinstance(other, GridFSProxy): | ||||||
| @@ -1834,12 +1881,9 @@ class ImageField(FileField): | |||||||
|     """ |     """ | ||||||
|     A Image File storage field. |     A Image File storage field. | ||||||
|  |  | ||||||
|     @size (width, height, force): |     :param size: max size to store images, provided as (width, height, force) | ||||||
|         max size to store images, if larger will be automatically resized |         if larger, it will be automatically resized (ex: size=(800, 600, True)) | ||||||
|         ex: size=(800, 600, True) |     :param thumbnail_size: size to generate a thumbnail, provided as (width, height, force) | ||||||
|  |  | ||||||
|     @thumbnail (width, height, force): |  | ||||||
|         size to generate a thumbnail |  | ||||||
|  |  | ||||||
|     .. versionadded:: 0.6 |     .. versionadded:: 0.6 | ||||||
|     """ |     """ | ||||||
| @@ -1910,8 +1954,7 @@ class SequenceField(BaseField): | |||||||
|         self.collection_name = collection_name or self.COLLECTION_NAME |         self.collection_name = collection_name or self.COLLECTION_NAME | ||||||
|         self.db_alias = db_alias or DEFAULT_CONNECTION_NAME |         self.db_alias = db_alias or DEFAULT_CONNECTION_NAME | ||||||
|         self.sequence_name = sequence_name |         self.sequence_name = sequence_name | ||||||
|         self.value_decorator = (callable(value_decorator) and |         self.value_decorator = value_decorator if callable(value_decorator) else self.VALUE_DECORATOR | ||||||
|                                 value_decorator or self.VALUE_DECORATOR) |  | ||||||
|         super(SequenceField, self).__init__(*args, **kwargs) |         super(SequenceField, self).__init__(*args, **kwargs) | ||||||
|  |  | ||||||
|     def generate(self): |     def generate(self): | ||||||
| @@ -2020,7 +2063,7 @@ class UUIDField(BaseField): | |||||||
|                 if not isinstance(value, six.string_types): |                 if not isinstance(value, six.string_types): | ||||||
|                     value = six.text_type(value) |                     value = six.text_type(value) | ||||||
|                 return uuid.UUID(value) |                 return uuid.UUID(value) | ||||||
|             except Exception: |             except (ValueError, TypeError, AttributeError): | ||||||
|                 return original_value |                 return original_value | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
| @@ -2042,7 +2085,7 @@ class UUIDField(BaseField): | |||||||
|                 value = str(value) |                 value = str(value) | ||||||
|             try: |             try: | ||||||
|                 uuid.UUID(value) |                 uuid.UUID(value) | ||||||
|             except Exception as exc: |             except (ValueError, TypeError, AttributeError) as exc: | ||||||
|                 self.error('Could not convert to UUID: %s' % exc) |                 self.error('Could not convert to UUID: %s' % exc) | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -2222,9 +2265,9 @@ class LazyReferenceField(BaseField): | |||||||
|         :param reverse_delete_rule: Determines what to do when the referring |         :param reverse_delete_rule: Determines what to do when the referring | ||||||
|           object is deleted |           object is deleted | ||||||
|         :param passthrough: When trying to access unknown fields, the |         :param passthrough: When trying to access unknown fields, the | ||||||
|         :class:`~mongoengine.base.datastructure.LazyReference` instance will |           :class:`~mongoengine.base.datastructure.LazyReference` instance will | ||||||
|         automatically call `fetch()` and try to retrive the field on the fetched |           automatically call `fetch()` and try to retrive the field on the fetched | ||||||
|         document. Note this only work getting field (not setting or deleting). |           document. Note this only work getting field (not setting or deleting). | ||||||
|         """ |         """ | ||||||
|         # XXX ValidationError raised outside of the "validate" method. |         # XXX ValidationError raised outside of the "validate" method. | ||||||
|         if ( |         if ( | ||||||
|   | |||||||
							
								
								
									
										19
									
								
								mongoengine/mongodb_support.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										19
									
								
								mongoengine/mongodb_support.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,19 @@ | |||||||
|  | """ | ||||||
|  | Helper functions, constants, and types to aid with MongoDB version support | ||||||
|  | """ | ||||||
|  | from mongoengine.connection import get_connection | ||||||
|  |  | ||||||
|  |  | ||||||
|  | # Constant that can be used to compare the version retrieved with | ||||||
|  | # get_mongodb_version() | ||||||
|  | MONGODB_34 = (3, 4) | ||||||
|  | MONGODB_36 = (3, 6) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def get_mongodb_version(): | ||||||
|  |     """Return the version of the connected mongoDB (first 2 digits) | ||||||
|  |  | ||||||
|  |     :return: tuple(int, int) | ||||||
|  |     """ | ||||||
|  |     version_list = get_connection().server_info()['versionArray'][:2]     # e.g: (3, 2) | ||||||
|  |     return tuple(version_list) | ||||||
							
								
								
									
										32
									
								
								mongoengine/pymongo_support.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										32
									
								
								mongoengine/pymongo_support.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,32 @@ | |||||||
|  | """ | ||||||
|  | Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support. | ||||||
|  | """ | ||||||
|  | import pymongo | ||||||
|  |  | ||||||
|  | _PYMONGO_37 = (3, 7) | ||||||
|  |  | ||||||
|  | PYMONGO_VERSION = tuple(pymongo.version_tuple[:2]) | ||||||
|  |  | ||||||
|  | IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37 | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def count_documents(collection, filter): | ||||||
|  |     """Pymongo>3.7 deprecates count in favour of count_documents""" | ||||||
|  |     if IS_PYMONGO_GTE_37: | ||||||
|  |         return collection.count_documents(filter) | ||||||
|  |     else: | ||||||
|  |         count = collection.find(filter).count() | ||||||
|  |     return count | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def list_collection_names(db, include_system_collections=False): | ||||||
|  |     """Pymongo>3.7 deprecates collection_names in favour of list_collection_names""" | ||||||
|  |     if IS_PYMONGO_GTE_37: | ||||||
|  |         collections = db.list_collection_names() | ||||||
|  |     else: | ||||||
|  |         collections = db.collection_names() | ||||||
|  |  | ||||||
|  |     if not include_system_collections: | ||||||
|  |         collections = [c for c in collections if not c.startswith('system.')] | ||||||
|  |  | ||||||
|  |     return collections | ||||||
| @@ -1,17 +1,8 @@ | |||||||
| """ | """ | ||||||
| Helper functions, constants, and types to aid with Python v2.7 - v3.x and | Helper functions, constants, and types to aid with Python v2.7 - v3.x support | ||||||
| PyMongo v2.7 - v3.x support. |  | ||||||
| """ | """ | ||||||
| import pymongo |  | ||||||
| import six | import six | ||||||
|  |  | ||||||
|  |  | ||||||
| if pymongo.version_tuple[0] < 3: |  | ||||||
|     IS_PYMONGO_3 = False |  | ||||||
| else: |  | ||||||
|     IS_PYMONGO_3 = True |  | ||||||
|  |  | ||||||
|  |  | ||||||
| # six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. | # six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. | ||||||
| StringIO = six.BytesIO | StringIO = six.BytesIO | ||||||
|  |  | ||||||
| @@ -23,3 +14,10 @@ if not six.PY3: | |||||||
|         pass |         pass | ||||||
|     else: |     else: | ||||||
|         StringIO = cStringIO.StringIO |         StringIO = cStringIO.StringIO | ||||||
|  |  | ||||||
|  |  | ||||||
|  | if six.PY3: | ||||||
|  |     from collections.abc import Hashable | ||||||
|  | else: | ||||||
|  |     # raises DeprecationWarnings in Python >=3.7 | ||||||
|  |     from collections import Hashable | ||||||
|   | |||||||
| @@ -2,7 +2,6 @@ from __future__ import absolute_import | |||||||
|  |  | ||||||
| import copy | import copy | ||||||
| import itertools | import itertools | ||||||
| import operator |  | ||||||
| import pprint | import pprint | ||||||
| import re | import re | ||||||
| import warnings | import warnings | ||||||
| @@ -11,8 +10,10 @@ from bson import SON, json_util | |||||||
| from bson.code import Code | from bson.code import Code | ||||||
| import pymongo | import pymongo | ||||||
| import pymongo.errors | import pymongo.errors | ||||||
|  | from pymongo.collection import ReturnDocument | ||||||
| from pymongo.common import validate_read_preference | from pymongo.common import validate_read_preference | ||||||
| import six | import six | ||||||
|  | from six import iteritems | ||||||
|  |  | ||||||
| from mongoengine import signals | from mongoengine import signals | ||||||
| from mongoengine.base import get_document | from mongoengine.base import get_document | ||||||
| @@ -21,14 +22,10 @@ from mongoengine.connection import get_db | |||||||
| from mongoengine.context_managers import set_write_concern, switch_db | from mongoengine.context_managers import set_write_concern, switch_db | ||||||
| from mongoengine.errors import (InvalidQueryError, LookUpError, | from mongoengine.errors import (InvalidQueryError, LookUpError, | ||||||
|                                 NotUniqueError, OperationError) |                                 NotUniqueError, OperationError) | ||||||
| from mongoengine.python_support import IS_PYMONGO_3 |  | ||||||
| from mongoengine.queryset import transform | from mongoengine.queryset import transform | ||||||
| from mongoengine.queryset.field_list import QueryFieldList | from mongoengine.queryset.field_list import QueryFieldList | ||||||
| from mongoengine.queryset.visitor import Q, QNode | from mongoengine.queryset.visitor import Q, QNode | ||||||
|  |  | ||||||
| if IS_PYMONGO_3: |  | ||||||
|     from pymongo.collection import ReturnDocument |  | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ('BaseQuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') | __all__ = ('BaseQuerySet', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL') | ||||||
|  |  | ||||||
| @@ -39,8 +36,6 @@ CASCADE = 2 | |||||||
| DENY = 3 | DENY = 3 | ||||||
| PULL = 4 | PULL = 4 | ||||||
|  |  | ||||||
| RE_TYPE = type(re.compile('')) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class BaseQuerySet(object): | class BaseQuerySet(object): | ||||||
|     """A set of results returned from a query. Wraps a MongoDB cursor, |     """A set of results returned from a query. Wraps a MongoDB cursor, | ||||||
| @@ -191,7 +186,7 @@ class BaseQuerySet(object): | |||||||
|                 ) |                 ) | ||||||
|  |  | ||||||
|             if queryset._as_pymongo: |             if queryset._as_pymongo: | ||||||
|                 return queryset._get_as_pymongo(queryset._cursor[key]) |                 return queryset._cursor[key] | ||||||
|  |  | ||||||
|             return queryset._document._from_son( |             return queryset._document._from_son( | ||||||
|                 queryset._cursor[key], |                 queryset._cursor[key], | ||||||
| @@ -199,7 +194,7 @@ class BaseQuerySet(object): | |||||||
|                 only_fields=self.only_fields |                 only_fields=self.only_fields | ||||||
|             ) |             ) | ||||||
|  |  | ||||||
|         raise AttributeError('Provide a slice or an integer index') |         raise TypeError('Provide a slice or an integer index') | ||||||
|  |  | ||||||
|     def __iter__(self): |     def __iter__(self): | ||||||
|         raise NotImplementedError |         raise NotImplementedError | ||||||
| @@ -209,18 +204,16 @@ class BaseQuerySet(object): | |||||||
|         queryset = self.order_by() |         queryset = self.order_by() | ||||||
|         return False if queryset.first() is None else True |         return False if queryset.first() is None else True | ||||||
|  |  | ||||||
|     def __nonzero__(self): |  | ||||||
|         """Avoid to open all records in an if stmt in Py2.""" |  | ||||||
|         return self._has_data() |  | ||||||
|  |  | ||||||
|     def __bool__(self): |     def __bool__(self): | ||||||
|         """Avoid to open all records in an if stmt in Py3.""" |         """Avoid to open all records in an if stmt in Py3.""" | ||||||
|         return self._has_data() |         return self._has_data() | ||||||
|  |  | ||||||
|  |     __nonzero__ = __bool__  # For Py2 support | ||||||
|  |  | ||||||
|     # Core functions |     # Core functions | ||||||
|  |  | ||||||
|     def all(self): |     def all(self): | ||||||
|         """Returns all documents.""" |         """Returns a copy of the current QuerySet.""" | ||||||
|         return self.__call__() |         return self.__call__() | ||||||
|  |  | ||||||
|     def filter(self, *q_objs, **query): |     def filter(self, *q_objs, **query): | ||||||
| @@ -269,13 +262,13 @@ class BaseQuerySet(object): | |||||||
|         queryset = queryset.filter(*q_objs, **query) |         queryset = queryset.filter(*q_objs, **query) | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             result = queryset.next() |             result = six.next(queryset) | ||||||
|         except StopIteration: |         except StopIteration: | ||||||
|             msg = ('%s matching query does not exist.' |             msg = ('%s matching query does not exist.' | ||||||
|                    % queryset._document._class_name) |                    % queryset._document._class_name) | ||||||
|             raise queryset._document.DoesNotExist(msg) |             raise queryset._document.DoesNotExist(msg) | ||||||
|         try: |         try: | ||||||
|             queryset.next() |             six.next(queryset) | ||||||
|         except StopIteration: |         except StopIteration: | ||||||
|             return result |             return result | ||||||
|  |  | ||||||
| @@ -342,7 +335,7 @@ class BaseQuerySet(object): | |||||||
|                        % str(self._document)) |                        % str(self._document)) | ||||||
|                 raise OperationError(msg) |                 raise OperationError(msg) | ||||||
|             if doc.pk and not doc._created: |             if doc.pk and not doc._created: | ||||||
|                 msg = 'Some documents have ObjectIds use doc.update() instead' |                 msg = 'Some documents have ObjectIds, use doc.update() instead' | ||||||
|                 raise OperationError(msg) |                 raise OperationError(msg) | ||||||
|  |  | ||||||
|         signal_kwargs = signal_kwargs or {} |         signal_kwargs = signal_kwargs or {} | ||||||
| @@ -359,7 +352,7 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             inserted_result = insert_func(raw) |             inserted_result = insert_func(raw) | ||||||
|             ids = return_one and [inserted_result.inserted_id] or inserted_result.inserted_ids |             ids = [inserted_result.inserted_id] if return_one else inserted_result.inserted_ids | ||||||
|         except pymongo.errors.DuplicateKeyError as err: |         except pymongo.errors.DuplicateKeyError as err: | ||||||
|             message = 'Could not save document (%s)' |             message = 'Could not save document (%s)' | ||||||
|             raise NotUniqueError(message % six.text_type(err)) |             raise NotUniqueError(message % six.text_type(err)) | ||||||
| @@ -377,17 +370,20 @@ class BaseQuerySet(object): | |||||||
|                 raise NotUniqueError(message % six.text_type(err)) |                 raise NotUniqueError(message % six.text_type(err)) | ||||||
|             raise OperationError(message % six.text_type(err)) |             raise OperationError(message % six.text_type(err)) | ||||||
|  |  | ||||||
|  |         # Apply inserted_ids to documents | ||||||
|  |         for doc, doc_id in zip(docs, ids): | ||||||
|  |             doc.pk = doc_id | ||||||
|  |  | ||||||
|         if not load_bulk: |         if not load_bulk: | ||||||
|             signals.post_bulk_insert.send( |             signals.post_bulk_insert.send( | ||||||
|                 self._document, documents=docs, loaded=False, **signal_kwargs) |                 self._document, documents=docs, loaded=False, **signal_kwargs) | ||||||
|             return return_one and ids[0] or ids |             return ids[0] if return_one else ids | ||||||
|  |  | ||||||
|         documents = self.in_bulk(ids) |         documents = self.in_bulk(ids) | ||||||
|         results = [] |         results = [documents.get(obj_id) for obj_id in ids] | ||||||
|         for obj_id in ids: |  | ||||||
|             results.append(documents.get(obj_id)) |  | ||||||
|         signals.post_bulk_insert.send( |         signals.post_bulk_insert.send( | ||||||
|             self._document, documents=results, loaded=True, **signal_kwargs) |             self._document, documents=results, loaded=True, **signal_kwargs) | ||||||
|         return return_one and results[0] or results |         return results[0] if return_one else results | ||||||
|  |  | ||||||
|     def count(self, with_limit_and_skip=False): |     def count(self, with_limit_and_skip=False): | ||||||
|         """Count the selected elements in the query. |         """Count the selected elements in the query. | ||||||
| @@ -396,9 +392,11 @@ class BaseQuerySet(object): | |||||||
|             :meth:`skip` that has been applied to this cursor into account when |             :meth:`skip` that has been applied to this cursor into account when | ||||||
|             getting the count |             getting the count | ||||||
|         """ |         """ | ||||||
|         if self._limit == 0 and with_limit_and_skip or self._none: |         if self._limit == 0 and with_limit_and_skip is False or self._none: | ||||||
|             return 0 |             return 0 | ||||||
|         return self._cursor.count(with_limit_and_skip=with_limit_and_skip) |         count = self._cursor.count(with_limit_and_skip=with_limit_and_skip) | ||||||
|  |         self._cursor_obj = None | ||||||
|  |         return count | ||||||
|  |  | ||||||
|     def delete(self, write_concern=None, _from_doc_delete=False, |     def delete(self, write_concern=None, _from_doc_delete=False, | ||||||
|                cascade_refs=None): |                cascade_refs=None): | ||||||
| @@ -498,11 +496,12 @@ class BaseQuerySet(object): | |||||||
|             ``save(..., write_concern={w: 2, fsync: True}, ...)`` will |             ``save(..., write_concern={w: 2, fsync: True}, ...)`` will | ||||||
|             wait until at least two servers have recorded the write and |             wait until at least two servers have recorded the write and | ||||||
|             will force an fsync on the primary server. |             will force an fsync on the primary server. | ||||||
|         :param full_result: Return the full result dictionary rather than just the number |         :param full_result: Return the associated ``pymongo.UpdateResult`` rather than just the number | ||||||
|             updated, e.g. return |             updated items | ||||||
|             ``{'n': 2, 'nModified': 2, 'ok': 1.0, 'updatedExisting': True}``. |  | ||||||
|         :param update: Django-style update keyword arguments |         :param update: Django-style update keyword arguments | ||||||
|  |  | ||||||
|  |         :returns the number of updated documents (unless ``full_result`` is True) | ||||||
|  |  | ||||||
|         .. versionadded:: 0.2 |         .. versionadded:: 0.2 | ||||||
|         """ |         """ | ||||||
|         if not update and not upsert: |         if not update and not upsert: | ||||||
| @@ -566,7 +565,7 @@ class BaseQuerySet(object): | |||||||
|             document = self._document.objects.with_id(atomic_update.upserted_id) |             document = self._document.objects.with_id(atomic_update.upserted_id) | ||||||
|         return document |         return document | ||||||
|  |  | ||||||
|     def update_one(self, upsert=False, write_concern=None, **update): |     def update_one(self, upsert=False, write_concern=None, full_result=False, **update): | ||||||
|         """Perform an atomic update on the fields of the first document |         """Perform an atomic update on the fields of the first document | ||||||
|         matched by the query. |         matched by the query. | ||||||
|  |  | ||||||
| @@ -577,12 +576,19 @@ class BaseQuerySet(object): | |||||||
|             ``save(..., write_concern={w: 2, fsync: True}, ...)`` will |             ``save(..., write_concern={w: 2, fsync: True}, ...)`` will | ||||||
|             wait until at least two servers have recorded the write and |             wait until at least two servers have recorded the write and | ||||||
|             will force an fsync on the primary server. |             will force an fsync on the primary server. | ||||||
|  |         :param full_result: Return the associated ``pymongo.UpdateResult`` rather than just the number | ||||||
|  |             updated items | ||||||
|         :param update: Django-style update keyword arguments |         :param update: Django-style update keyword arguments | ||||||
|  |             full_result | ||||||
|  |         :returns the number of updated documents (unless ``full_result`` is True) | ||||||
|         .. versionadded:: 0.2 |         .. versionadded:: 0.2 | ||||||
|         """ |         """ | ||||||
|         return self.update( |         return self.update( | ||||||
|             upsert=upsert, multi=False, write_concern=write_concern, **update) |             upsert=upsert, | ||||||
|  |             multi=False, | ||||||
|  |             write_concern=write_concern, | ||||||
|  |             full_result=full_result, | ||||||
|  |             **update) | ||||||
|  |  | ||||||
|     def modify(self, upsert=False, full_response=False, remove=False, new=False, **update): |     def modify(self, upsert=False, full_response=False, remove=False, new=False, **update): | ||||||
|         """Update and return the updated document. |         """Update and return the updated document. | ||||||
| @@ -617,31 +623,25 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|         queryset = self.clone() |         queryset = self.clone() | ||||||
|         query = queryset._query |         query = queryset._query | ||||||
|         if not IS_PYMONGO_3 or not remove: |         if not remove: | ||||||
|             update = transform.update(queryset._document, **update) |             update = transform.update(queryset._document, **update) | ||||||
|         sort = queryset._ordering |         sort = queryset._ordering | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             if IS_PYMONGO_3: |             if full_response: | ||||||
|                 if full_response: |                 msg = 'With PyMongo 3+, it is not possible anymore to get the full response.' | ||||||
|                     msg = 'With PyMongo 3+, it is not possible anymore to get the full response.' |                 warnings.warn(msg, DeprecationWarning) | ||||||
|                     warnings.warn(msg, DeprecationWarning) |             if remove: | ||||||
|                 if remove: |                 result = queryset._collection.find_one_and_delete( | ||||||
|                     result = queryset._collection.find_one_and_delete( |                     query, sort=sort, **self._cursor_args) | ||||||
|                         query, sort=sort, **self._cursor_args) |  | ||||||
|                 else: |  | ||||||
|                     if new: |  | ||||||
|                         return_doc = ReturnDocument.AFTER |  | ||||||
|                     else: |  | ||||||
|                         return_doc = ReturnDocument.BEFORE |  | ||||||
|                     result = queryset._collection.find_one_and_update( |  | ||||||
|                         query, update, upsert=upsert, sort=sort, return_document=return_doc, |  | ||||||
|                         **self._cursor_args) |  | ||||||
|  |  | ||||||
|             else: |             else: | ||||||
|                 result = queryset._collection.find_and_modify( |                 if new: | ||||||
|                     query, update, upsert=upsert, sort=sort, remove=remove, new=new, |                     return_doc = ReturnDocument.AFTER | ||||||
|                     full_response=full_response, **self._cursor_args) |                 else: | ||||||
|  |                     return_doc = ReturnDocument.BEFORE | ||||||
|  |                 result = queryset._collection.find_one_and_update( | ||||||
|  |                     query, update, upsert=upsert, sort=sort, return_document=return_doc, | ||||||
|  |                     **self._cursor_args) | ||||||
|         except pymongo.errors.DuplicateKeyError as err: |         except pymongo.errors.DuplicateKeyError as err: | ||||||
|             raise NotUniqueError(u'Update failed (%s)' % err) |             raise NotUniqueError(u'Update failed (%s)' % err) | ||||||
|         except pymongo.errors.OperationFailure as err: |         except pymongo.errors.OperationFailure as err: | ||||||
| @@ -690,7 +690,7 @@ class BaseQuerySet(object): | |||||||
|                     self._document._from_son(doc, only_fields=self.only_fields)) |                     self._document._from_son(doc, only_fields=self.only_fields)) | ||||||
|         elif self._as_pymongo: |         elif self._as_pymongo: | ||||||
|             for doc in docs: |             for doc in docs: | ||||||
|                 doc_map[doc['_id']] = self._get_as_pymongo(doc) |                 doc_map[doc['_id']] = doc | ||||||
|         else: |         else: | ||||||
|             for doc in docs: |             for doc in docs: | ||||||
|                 doc_map[doc['_id']] = self._document._from_son( |                 doc_map[doc['_id']] = self._document._from_son( | ||||||
| @@ -748,7 +748,7 @@ class BaseQuerySet(object): | |||||||
|                       '_read_preference', '_iter', '_scalar', '_as_pymongo', |                       '_read_preference', '_iter', '_scalar', '_as_pymongo', | ||||||
|                       '_limit', '_skip', '_hint', '_auto_dereference', |                       '_limit', '_skip', '_hint', '_auto_dereference', | ||||||
|                       '_search_text', 'only_fields', '_max_time_ms', |                       '_search_text', 'only_fields', '_max_time_ms', | ||||||
|                       '_comment') |                       '_comment', '_batch_size') | ||||||
|  |  | ||||||
|         for prop in copy_props: |         for prop in copy_props: | ||||||
|             val = getattr(self, prop) |             val = getattr(self, prop) | ||||||
| @@ -775,10 +775,11 @@ class BaseQuerySet(object): | |||||||
|         """Limit the number of returned documents to `n`. This may also be |         """Limit the number of returned documents to `n`. This may also be | ||||||
|         achieved using array-slicing syntax (e.g. ``User.objects[:5]``). |         achieved using array-slicing syntax (e.g. ``User.objects[:5]``). | ||||||
|  |  | ||||||
|         :param n: the maximum number of objects to return |         :param n: the maximum number of objects to return if n is greater than 0. | ||||||
|  |         When 0 is passed, returns all the documents in the cursor | ||||||
|         """ |         """ | ||||||
|         queryset = self.clone() |         queryset = self.clone() | ||||||
|         queryset._limit = n if n != 0 else 1 |         queryset._limit = n | ||||||
|  |  | ||||||
|         # If a cursor object has already been created, apply the limit to it. |         # If a cursor object has already been created, apply the limit to it. | ||||||
|         if queryset._cursor_obj: |         if queryset._cursor_obj: | ||||||
| @@ -976,11 +977,10 @@ class BaseQuerySet(object): | |||||||
|         # explicitly included, and then more complicated operators such as |         # explicitly included, and then more complicated operators such as | ||||||
|         # $slice. |         # $slice. | ||||||
|         def _sort_key(field_tuple): |         def _sort_key(field_tuple): | ||||||
|             key, value = field_tuple |             _, value = field_tuple | ||||||
|             if isinstance(value, (int)): |             if isinstance(value, int): | ||||||
|                 return value  # 0 for exclusion, 1 for inclusion |                 return value  # 0 for exclusion, 1 for inclusion | ||||||
|             else: |             return 2  # so that complex values appear last | ||||||
|                 return 2  # so that complex values appear last |  | ||||||
|  |  | ||||||
|         fields = sorted(cleaned_fields, key=_sort_key) |         fields = sorted(cleaned_fields, key=_sort_key) | ||||||
|  |  | ||||||
| @@ -1009,13 +1009,15 @@ class BaseQuerySet(object): | |||||||
|         return queryset |         return queryset | ||||||
|  |  | ||||||
|     def order_by(self, *keys): |     def order_by(self, *keys): | ||||||
|         """Order the :class:`~mongoengine.queryset.QuerySet` by the keys. The |         """Order the :class:`~mongoengine.queryset.QuerySet` by the given keys. | ||||||
|         order may be specified by prepending each of the keys by a + or a -. |  | ||||||
|         Ascending order is assumed. If no keys are passed, existing ordering |         The order may be specified by prepending each of the keys by a "+" or | ||||||
|         is cleared instead. |         a "-". Ascending order is assumed if there's no prefix. | ||||||
|  |  | ||||||
|  |         If no keys are passed, existing ordering is cleared instead. | ||||||
|  |  | ||||||
|         :param keys: fields to order the query results by; keys may be |         :param keys: fields to order the query results by; keys may be | ||||||
|             prefixed with **+** or **-** to determine the ordering direction |             prefixed with "+" or a "-" to determine the ordering direction. | ||||||
|         """ |         """ | ||||||
|         queryset = self.clone() |         queryset = self.clone() | ||||||
|  |  | ||||||
| @@ -1073,15 +1075,14 @@ class BaseQuerySet(object): | |||||||
|         ..versionchanged:: 0.5 - made chainable |         ..versionchanged:: 0.5 - made chainable | ||||||
|         .. deprecated:: Ignored with PyMongo 3+ |         .. deprecated:: Ignored with PyMongo 3+ | ||||||
|         """ |         """ | ||||||
|         if IS_PYMONGO_3: |         msg = 'snapshot is deprecated as it has no impact when using PyMongo 3+.' | ||||||
|             msg = 'snapshot is deprecated as it has no impact when using PyMongo 3+.' |         warnings.warn(msg, DeprecationWarning) | ||||||
|             warnings.warn(msg, DeprecationWarning) |  | ||||||
|         queryset = self.clone() |         queryset = self.clone() | ||||||
|         queryset._snapshot = enabled |         queryset._snapshot = enabled | ||||||
|         return queryset |         return queryset | ||||||
|  |  | ||||||
|     def timeout(self, enabled): |     def timeout(self, enabled): | ||||||
|         """Enable or disable the default mongod timeout when querying. |         """Enable or disable the default mongod timeout when querying. (no_cursor_timeout option) | ||||||
|  |  | ||||||
|         :param enabled: whether or not the timeout is used |         :param enabled: whether or not the timeout is used | ||||||
|  |  | ||||||
| @@ -1099,9 +1100,8 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|         .. deprecated:: Ignored with PyMongo 3+ |         .. deprecated:: Ignored with PyMongo 3+ | ||||||
|         """ |         """ | ||||||
|         if IS_PYMONGO_3: |         msg = 'slave_okay is deprecated as it has no impact when using PyMongo 3+.' | ||||||
|             msg = 'slave_okay is deprecated as it has no impact when using PyMongo 3+.' |         warnings.warn(msg, DeprecationWarning) | ||||||
|             warnings.warn(msg, DeprecationWarning) |  | ||||||
|         queryset = self.clone() |         queryset = self.clone() | ||||||
|         queryset._slave_okay = enabled |         queryset._slave_okay = enabled | ||||||
|         return queryset |         return queryset | ||||||
| @@ -1191,14 +1191,18 @@ class BaseQuerySet(object): | |||||||
|             initial_pipeline.append({'$sort': dict(self._ordering)}) |             initial_pipeline.append({'$sort': dict(self._ordering)}) | ||||||
|  |  | ||||||
|         if self._limit is not None: |         if self._limit is not None: | ||||||
|             initial_pipeline.append({'$limit': self._limit}) |             # As per MongoDB Documentation (https://docs.mongodb.com/manual/reference/operator/aggregation/limit/), | ||||||
|  |             # keeping limit stage right after sort stage is more efficient. But this leads to wrong set of documents | ||||||
|  |             # for a skip stage that might succeed these. So we need to maintain more documents in memory in such a | ||||||
|  |             # case (https://stackoverflow.com/a/24161461). | ||||||
|  |             initial_pipeline.append({'$limit': self._limit + (self._skip or 0)}) | ||||||
|  |  | ||||||
|         if self._skip is not None: |         if self._skip is not None: | ||||||
|             initial_pipeline.append({'$skip': self._skip}) |             initial_pipeline.append({'$skip': self._skip}) | ||||||
|  |  | ||||||
|         pipeline = initial_pipeline + list(pipeline) |         pipeline = initial_pipeline + list(pipeline) | ||||||
|  |  | ||||||
|         if IS_PYMONGO_3 and self._read_preference is not None: |         if self._read_preference is not None: | ||||||
|             return self._collection.with_options(read_preference=self._read_preference) \ |             return self._collection.with_options(read_preference=self._read_preference) \ | ||||||
|                        .aggregate(pipeline, cursor={}, **kwargs) |                        .aggregate(pipeline, cursor={}, **kwargs) | ||||||
|  |  | ||||||
| @@ -1408,11 +1412,7 @@ class BaseQuerySet(object): | |||||||
|         if isinstance(field_instances[-1], ListField): |         if isinstance(field_instances[-1], ListField): | ||||||
|             pipeline.insert(1, {'$unwind': '$' + field}) |             pipeline.insert(1, {'$unwind': '$' + field}) | ||||||
|  |  | ||||||
|         result = self._document._get_collection().aggregate(pipeline) |         result = tuple(self._document._get_collection().aggregate(pipeline)) | ||||||
|         if IS_PYMONGO_3: |  | ||||||
|             result = tuple(result) |  | ||||||
|         else: |  | ||||||
|             result = result.get('result') |  | ||||||
|  |  | ||||||
|         if result: |         if result: | ||||||
|             return result[0]['total'] |             return result[0]['total'] | ||||||
| @@ -1439,11 +1439,7 @@ class BaseQuerySet(object): | |||||||
|         if isinstance(field_instances[-1], ListField): |         if isinstance(field_instances[-1], ListField): | ||||||
|             pipeline.insert(1, {'$unwind': '$' + field}) |             pipeline.insert(1, {'$unwind': '$' + field}) | ||||||
|  |  | ||||||
|         result = self._document._get_collection().aggregate(pipeline) |         result = tuple(self._document._get_collection().aggregate(pipeline)) | ||||||
|         if IS_PYMONGO_3: |  | ||||||
|             result = tuple(result) |  | ||||||
|         else: |  | ||||||
|             result = result.get('result') |  | ||||||
|         if result: |         if result: | ||||||
|             return result[0]['total'] |             return result[0]['total'] | ||||||
|         return 0 |         return 0 | ||||||
| @@ -1477,16 +1473,16 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|     # Iterator helpers |     # Iterator helpers | ||||||
|  |  | ||||||
|     def next(self): |     def __next__(self): | ||||||
|         """Wrap the result in a :class:`~mongoengine.Document` object. |         """Wrap the result in a :class:`~mongoengine.Document` object. | ||||||
|         """ |         """ | ||||||
|         if self._limit == 0 or self._none: |         if self._limit == 0 or self._none: | ||||||
|             raise StopIteration |             raise StopIteration | ||||||
|  |  | ||||||
|         raw_doc = self._cursor.next() |         raw_doc = six.next(self._cursor) | ||||||
|  |  | ||||||
|         if self._as_pymongo: |         if self._as_pymongo: | ||||||
|             return self._get_as_pymongo(raw_doc) |             return raw_doc | ||||||
|  |  | ||||||
|         doc = self._document._from_son( |         doc = self._document._from_son( | ||||||
|             raw_doc, _auto_dereference=self._auto_dereference, |             raw_doc, _auto_dereference=self._auto_dereference, | ||||||
| @@ -1497,6 +1493,8 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|         return doc |         return doc | ||||||
|  |  | ||||||
|  |     next = __next__     # For Python2 support | ||||||
|  |  | ||||||
|     def rewind(self): |     def rewind(self): | ||||||
|         """Rewind the cursor to its unevaluated state. |         """Rewind the cursor to its unevaluated state. | ||||||
|  |  | ||||||
| @@ -1516,26 +1514,16 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def _cursor_args(self): |     def _cursor_args(self): | ||||||
|         if not IS_PYMONGO_3: |         fields_name = 'projection' | ||||||
|             fields_name = 'fields' |         # snapshot is not handled at all by PyMongo 3+ | ||||||
|             cursor_args = { |         # TODO: evaluate similar possibilities using modifiers | ||||||
|                 'timeout': self._timeout, |         if self._snapshot: | ||||||
|                 'snapshot': self._snapshot |             msg = 'The snapshot option is not anymore available with PyMongo 3+' | ||||||
|             } |             warnings.warn(msg, DeprecationWarning) | ||||||
|             if self._read_preference is not None: |         cursor_args = { | ||||||
|                 cursor_args['read_preference'] = self._read_preference |             'no_cursor_timeout': not self._timeout | ||||||
|             else: |         } | ||||||
|                 cursor_args['slave_okay'] = self._slave_okay |  | ||||||
|         else: |  | ||||||
|             fields_name = 'projection' |  | ||||||
|             # snapshot is not handled at all by PyMongo 3+ |  | ||||||
|             # TODO: evaluate similar possibilities using modifiers |  | ||||||
|             if self._snapshot: |  | ||||||
|                 msg = 'The snapshot option is not anymore available with PyMongo 3+' |  | ||||||
|                 warnings.warn(msg, DeprecationWarning) |  | ||||||
|             cursor_args = { |  | ||||||
|                 'no_cursor_timeout': not self._timeout |  | ||||||
|             } |  | ||||||
|         if self._loaded_fields: |         if self._loaded_fields: | ||||||
|             cursor_args[fields_name] = self._loaded_fields.as_dict() |             cursor_args[fields_name] = self._loaded_fields.as_dict() | ||||||
|  |  | ||||||
| @@ -1559,7 +1547,7 @@ class BaseQuerySet(object): | |||||||
|         # XXX In PyMongo 3+, we define the read preference on a collection |         # XXX In PyMongo 3+, we define the read preference on a collection | ||||||
|         # level, not a cursor level. Thus, we need to get a cloned collection |         # level, not a cursor level. Thus, we need to get a cloned collection | ||||||
|         # object using `with_options` first. |         # object using `with_options` first. | ||||||
|         if IS_PYMONGO_3 and self._read_preference is not None: |         if self._read_preference is not None: | ||||||
|             self._cursor_obj = self._collection\ |             self._cursor_obj = self._collection\ | ||||||
|                 .with_options(read_preference=self._read_preference)\ |                 .with_options(read_preference=self._read_preference)\ | ||||||
|                 .find(self._query, **self._cursor_args) |                 .find(self._query, **self._cursor_args) | ||||||
| @@ -1729,13 +1717,13 @@ class BaseQuerySet(object): | |||||||
|             } |             } | ||||||
|         """ |         """ | ||||||
|         total, data, types = self.exec_js(freq_func, field) |         total, data, types = self.exec_js(freq_func, field) | ||||||
|         values = {types.get(k): int(v) for k, v in data.iteritems()} |         values = {types.get(k): int(v) for k, v in iteritems(data)} | ||||||
|  |  | ||||||
|         if normalize: |         if normalize: | ||||||
|             values = {k: float(v) / total for k, v in values.items()} |             values = {k: float(v) / total for k, v in values.items()} | ||||||
|  |  | ||||||
|         frequencies = {} |         frequencies = {} | ||||||
|         for k, v in values.iteritems(): |         for k, v in iteritems(values): | ||||||
|             if isinstance(k, float): |             if isinstance(k, float): | ||||||
|                 if int(k) == k: |                 if int(k) == k: | ||||||
|                     k = int(k) |                     k = int(k) | ||||||
| @@ -1831,26 +1819,6 @@ class BaseQuerySet(object): | |||||||
|  |  | ||||||
|         return tuple(data) |         return tuple(data) | ||||||
|  |  | ||||||
|     def _get_as_pymongo(self, doc): |  | ||||||
|         """Clean up a PyMongo doc, removing fields that were only fetched |  | ||||||
|         for the sake of MongoEngine's implementation, and return it. |  | ||||||
|         """ |  | ||||||
|         # Always remove _cls as a MongoEngine's implementation detail. |  | ||||||
|         if '_cls' in doc: |  | ||||||
|             del doc['_cls'] |  | ||||||
|  |  | ||||||
|         # If the _id was not included in a .only or was excluded in a .exclude, |  | ||||||
|         # remove it from the doc (we always fetch it so that we can properly |  | ||||||
|         # construct documents). |  | ||||||
|         fields = self._loaded_fields |  | ||||||
|         if fields and '_id' in doc and ( |  | ||||||
|             (fields.value == QueryFieldList.ONLY and '_id' not in fields.fields) or |  | ||||||
|             (fields.value == QueryFieldList.EXCLUDE and '_id' in fields.fields) |  | ||||||
|         ): |  | ||||||
|             del doc['_id'] |  | ||||||
|  |  | ||||||
|         return doc |  | ||||||
|  |  | ||||||
|     def _sub_js_fields(self, code): |     def _sub_js_fields(self, code): | ||||||
|         """When fields are specified with [~fieldname] syntax, where |         """When fields are specified with [~fieldname] syntax, where | ||||||
|         *fieldname* is the Python name of a field, *fieldname* will be |         *fieldname* is the Python name of a field, *fieldname* will be | ||||||
| @@ -1872,8 +1840,8 @@ class BaseQuerySet(object): | |||||||
|             # Substitute the correct name for the field into the javascript |             # Substitute the correct name for the field into the javascript | ||||||
|             return '.'.join([f.db_field for f in fields]) |             return '.'.join([f.db_field for f in fields]) | ||||||
|  |  | ||||||
|         code = re.sub(u'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) |         code = re.sub(r'\[\s*~([A-z_][A-z_0-9.]+?)\s*\]', field_sub, code) | ||||||
|         code = re.sub(u'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, |         code = re.sub(r'\{\{\s*~([A-z_][A-z_0-9.]+?)\s*\}\}', field_path_sub, | ||||||
|                       code) |                       code) | ||||||
|         return code |         return code | ||||||
|  |  | ||||||
|   | |||||||
| @@ -63,9 +63,11 @@ class QueryFieldList(object): | |||||||
|             self._only_called = True |             self._only_called = True | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def __nonzero__(self): |     def __bool__(self): | ||||||
|         return bool(self.fields) |         return bool(self.fields) | ||||||
|  |  | ||||||
|  |     __nonzero__ = __bool__  # For Py2 support | ||||||
|  |  | ||||||
|     def as_dict(self): |     def as_dict(self): | ||||||
|         field_list = {field: self.value for field in self.fields} |         field_list = {field: self.value for field in self.fields} | ||||||
|         if self.slice: |         if self.slice: | ||||||
|   | |||||||
| @@ -36,7 +36,7 @@ class QuerySetManager(object): | |||||||
|         queryset_class = owner._meta.get('queryset_class', self.default) |         queryset_class = owner._meta.get('queryset_class', self.default) | ||||||
|         queryset = queryset_class(owner, owner._get_collection()) |         queryset = queryset_class(owner, owner._get_collection()) | ||||||
|         if self.get_queryset: |         if self.get_queryset: | ||||||
|             arg_count = self.get_queryset.func_code.co_argcount |             arg_count = self.get_queryset.__code__.co_argcount | ||||||
|             if arg_count == 1: |             if arg_count == 1: | ||||||
|                 queryset = self.get_queryset(queryset) |                 queryset = self.get_queryset(queryset) | ||||||
|             elif arg_count == 2: |             elif arg_count == 2: | ||||||
|   | |||||||
| @@ -89,7 +89,7 @@ class QuerySet(BaseQuerySet): | |||||||
|                 yield self._result_cache[pos] |                 yield self._result_cache[pos] | ||||||
|                 pos += 1 |                 pos += 1 | ||||||
|  |  | ||||||
|             # Raise StopIteration if we already established there were no more |             # return if we already established there were no more | ||||||
|             # docs in the db cursor. |             # docs in the db cursor. | ||||||
|             if not self._has_more: |             if not self._has_more: | ||||||
|                 return |                 return | ||||||
| @@ -115,7 +115,7 @@ class QuerySet(BaseQuerySet): | |||||||
|         # the result cache. |         # the result cache. | ||||||
|         try: |         try: | ||||||
|             for _ in six.moves.range(ITER_CHUNK_SIZE): |             for _ in six.moves.range(ITER_CHUNK_SIZE): | ||||||
|                 self._result_cache.append(self.next()) |                 self._result_cache.append(six.next(self)) | ||||||
|         except StopIteration: |         except StopIteration: | ||||||
|             # Getting this exception means there are no more docs in the |             # Getting this exception means there are no more docs in the | ||||||
|             # db cursor. Set _has_more to False so that we can use that |             # db cursor. Set _has_more to False so that we can use that | ||||||
| @@ -170,7 +170,7 @@ class QuerySetNoCache(BaseQuerySet): | |||||||
|         data = [] |         data = [] | ||||||
|         for _ in six.moves.range(REPR_OUTPUT_SIZE + 1): |         for _ in six.moves.range(REPR_OUTPUT_SIZE + 1): | ||||||
|             try: |             try: | ||||||
|                 data.append(self.next()) |                 data.append(six.next(self)) | ||||||
|             except StopIteration: |             except StopIteration: | ||||||
|                 break |                 break | ||||||
|  |  | ||||||
| @@ -186,10 +186,3 @@ class QuerySetNoCache(BaseQuerySet): | |||||||
|             queryset = self.clone() |             queryset = self.clone() | ||||||
|         queryset.rewind() |         queryset.rewind() | ||||||
|         return queryset |         return queryset | ||||||
|  |  | ||||||
|  |  | ||||||
| class QuerySetNoDeRef(QuerySet): |  | ||||||
|     """Special no_dereference QuerySet""" |  | ||||||
|  |  | ||||||
|     def __dereference(items, max_depth=1, instance=None, name=None): |  | ||||||
|         return items |  | ||||||
|   | |||||||
| @@ -4,12 +4,11 @@ from bson import ObjectId, SON | |||||||
| from bson.dbref import DBRef | from bson.dbref import DBRef | ||||||
| import pymongo | import pymongo | ||||||
| import six | import six | ||||||
|  | from six import iteritems | ||||||
|  |  | ||||||
| from mongoengine.base import UPDATE_OPERATORS | from mongoengine.base import UPDATE_OPERATORS | ||||||
| from mongoengine.common import _import_class | from mongoengine.common import _import_class | ||||||
| from mongoengine.connection import get_connection |  | ||||||
| from mongoengine.errors import InvalidQueryError | from mongoengine.errors import InvalidQueryError | ||||||
| from mongoengine.python_support import IS_PYMONGO_3 |  | ||||||
|  |  | ||||||
| __all__ = ('query', 'update') | __all__ = ('query', 'update') | ||||||
|  |  | ||||||
| @@ -87,18 +86,10 @@ def query(_doc_cls=None, **kwargs): | |||||||
|             singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] |             singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] | ||||||
|             singular_ops += STRING_OPERATORS |             singular_ops += STRING_OPERATORS | ||||||
|             if op in singular_ops: |             if op in singular_ops: | ||||||
|                 if isinstance(field, six.string_types): |                 value = field.prepare_query_value(op, value) | ||||||
|                     if (op in STRING_OPERATORS and |  | ||||||
|                             isinstance(value, six.string_types)): |  | ||||||
|                         StringField = _import_class('StringField') |  | ||||||
|                         value = StringField.prepare_query_value(op, value) |  | ||||||
|                     else: |  | ||||||
|                         value = field |  | ||||||
|                 else: |  | ||||||
|                     value = field.prepare_query_value(op, value) |  | ||||||
|  |  | ||||||
|                     if isinstance(field, CachedReferenceField) and value: |                 if isinstance(field, CachedReferenceField) and value: | ||||||
|                         value = value['_id'] |                     value = value['_id'] | ||||||
|  |  | ||||||
|             elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): |             elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): | ||||||
|                 # Raise an error if the in/nin/all/near param is not iterable. |                 # Raise an error if the in/nin/all/near param is not iterable. | ||||||
| @@ -147,14 +138,14 @@ def query(_doc_cls=None, **kwargs): | |||||||
|         if op is None or key not in mongo_query: |         if op is None or key not in mongo_query: | ||||||
|             mongo_query[key] = value |             mongo_query[key] = value | ||||||
|         elif key in mongo_query: |         elif key in mongo_query: | ||||||
|             if isinstance(mongo_query[key], dict): |             if isinstance(mongo_query[key], dict) and isinstance(value, dict): | ||||||
|                 mongo_query[key].update(value) |                 mongo_query[key].update(value) | ||||||
|                 # $max/minDistance needs to come last - convert to SON |                 # $max/minDistance needs to come last - convert to SON | ||||||
|                 value_dict = mongo_query[key] |                 value_dict = mongo_query[key] | ||||||
|                 if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \ |                 if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \ | ||||||
|                         ('$near' in value_dict or '$nearSphere' in value_dict): |                         ('$near' in value_dict or '$nearSphere' in value_dict): | ||||||
|                     value_son = SON() |                     value_son = SON() | ||||||
|                     for k, v in value_dict.iteritems(): |                     for k, v in iteritems(value_dict): | ||||||
|                         if k == '$maxDistance' or k == '$minDistance': |                         if k == '$maxDistance' or k == '$minDistance': | ||||||
|                             continue |                             continue | ||||||
|                         value_son[k] = v |                         value_son[k] = v | ||||||
| @@ -162,16 +153,14 @@ def query(_doc_cls=None, **kwargs): | |||||||
|                     # PyMongo 3+ and MongoDB < 2.6 |                     # PyMongo 3+ and MongoDB < 2.6 | ||||||
|                     near_embedded = False |                     near_embedded = False | ||||||
|                     for near_op in ('$near', '$nearSphere'): |                     for near_op in ('$near', '$nearSphere'): | ||||||
|                         if isinstance(value_dict.get(near_op), dict) and ( |                         if isinstance(value_dict.get(near_op), dict): | ||||||
|                                 IS_PYMONGO_3 or get_connection().max_wire_version > 1): |  | ||||||
|                             value_son[near_op] = SON(value_son[near_op]) |                             value_son[near_op] = SON(value_son[near_op]) | ||||||
|                             if '$maxDistance' in value_dict: |                             if '$maxDistance' in value_dict: | ||||||
|                                 value_son[near_op][ |                                 value_son[near_op]['$maxDistance'] = value_dict['$maxDistance'] | ||||||
|                                     '$maxDistance'] = value_dict['$maxDistance'] |  | ||||||
|                             if '$minDistance' in value_dict: |                             if '$minDistance' in value_dict: | ||||||
|                                 value_son[near_op][ |                                 value_son[near_op]['$minDistance'] = value_dict['$minDistance'] | ||||||
|                                     '$minDistance'] = value_dict['$minDistance'] |  | ||||||
|                             near_embedded = True |                             near_embedded = True | ||||||
|  |  | ||||||
|                     if not near_embedded: |                     if not near_embedded: | ||||||
|                         if '$maxDistance' in value_dict: |                         if '$maxDistance' in value_dict: | ||||||
|                             value_son['$maxDistance'] = value_dict['$maxDistance'] |                             value_son['$maxDistance'] = value_dict['$maxDistance'] | ||||||
| @@ -201,30 +190,37 @@ def update(_doc_cls=None, **update): | |||||||
|     format. |     format. | ||||||
|     """ |     """ | ||||||
|     mongo_update = {} |     mongo_update = {} | ||||||
|  |  | ||||||
|     for key, value in update.items(): |     for key, value in update.items(): | ||||||
|         if key == '__raw__': |         if key == '__raw__': | ||||||
|             mongo_update.update(value) |             mongo_update.update(value) | ||||||
|             continue |             continue | ||||||
|  |  | ||||||
|         parts = key.split('__') |         parts = key.split('__') | ||||||
|  |  | ||||||
|         # if there is no operator, default to 'set' |         # if there is no operator, default to 'set' | ||||||
|         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: |         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: | ||||||
|             parts.insert(0, 'set') |             parts.insert(0, 'set') | ||||||
|  |  | ||||||
|         # Check for an operator and transform to mongo-style if there is |         # Check for an operator and transform to mongo-style if there is | ||||||
|         op = None |         op = None | ||||||
|         if parts[0] in UPDATE_OPERATORS: |         if parts[0] in UPDATE_OPERATORS: | ||||||
|             op = parts.pop(0) |             op = parts.pop(0) | ||||||
|             # Convert Pythonic names to Mongo equivalents |             # Convert Pythonic names to Mongo equivalents | ||||||
|             if op in ('push_all', 'pull_all'): |             operator_map = { | ||||||
|                 op = op.replace('_all', 'All') |                 'push_all': 'pushAll', | ||||||
|             elif op == 'dec': |                 'pull_all': 'pullAll', | ||||||
|  |                 'dec': 'inc', | ||||||
|  |                 'add_to_set': 'addToSet', | ||||||
|  |                 'set_on_insert': 'setOnInsert' | ||||||
|  |             } | ||||||
|  |             if op == 'dec': | ||||||
|                 # Support decrement by flipping a positive value's sign |                 # Support decrement by flipping a positive value's sign | ||||||
|                 # and using 'inc' |                 # and using 'inc' | ||||||
|                 op = 'inc' |  | ||||||
|                 value = -value |                 value = -value | ||||||
|             elif op == 'add_to_set': |             # If the operator doesn't found from operator map, the op value | ||||||
|                 op = 'addToSet' |             # will stay unchanged | ||||||
|             elif op == 'set_on_insert': |             op = operator_map.get(op, op) | ||||||
|                 op = 'setOnInsert' |  | ||||||
|  |  | ||||||
|         match = None |         match = None | ||||||
|         if parts[-1] in COMPARISON_OPERATORS: |         if parts[-1] in COMPARISON_OPERATORS: | ||||||
| @@ -273,7 +269,7 @@ def update(_doc_cls=None, **update): | |||||||
|  |  | ||||||
|             if op == 'pull': |             if op == 'pull': | ||||||
|                 if field.required or value is not None: |                 if field.required or value is not None: | ||||||
|                     if match == 'in' and not isinstance(value, dict): |                     if match in ('in', 'nin') and not isinstance(value, dict): | ||||||
|                         value = _prepare_query_for_iterable(field, op, value) |                         value = _prepare_query_for_iterable(field, op, value) | ||||||
|                     else: |                     else: | ||||||
|                         value = field.prepare_query_value(op, value) |                         value = field.prepare_query_value(op, value) | ||||||
| @@ -291,6 +287,8 @@ def update(_doc_cls=None, **update): | |||||||
|                     value = field.prepare_query_value(op, value) |                     value = field.prepare_query_value(op, value) | ||||||
|             elif op == 'unset': |             elif op == 'unset': | ||||||
|                 value = 1 |                 value = 1 | ||||||
|  |             elif op == 'inc': | ||||||
|  |                 value = field.prepare_query_value(op, value) | ||||||
|  |  | ||||||
|         if match: |         if match: | ||||||
|             match = '$' + match |             match = '$' + match | ||||||
| @@ -298,10 +296,6 @@ def update(_doc_cls=None, **update): | |||||||
|  |  | ||||||
|         key = '.'.join(parts) |         key = '.'.join(parts) | ||||||
|  |  | ||||||
|         if not op: |  | ||||||
|             raise InvalidQueryError('Updates must supply an operation ' |  | ||||||
|                                     'eg: set__FIELD=value') |  | ||||||
|  |  | ||||||
|         if 'pull' in op and '.' in key: |         if 'pull' in op and '.' in key: | ||||||
|             # Dot operators don't work on pull operations |             # Dot operators don't work on pull operations | ||||||
|             # unless they point to a list field |             # unless they point to a list field | ||||||
| @@ -336,7 +330,7 @@ def update(_doc_cls=None, **update): | |||||||
|             value = {key: {'$each': value}} |             value = {key: {'$each': value}} | ||||||
|         elif op in ('push', 'pushAll'): |         elif op in ('push', 'pushAll'): | ||||||
|             if parts[-1].isdigit(): |             if parts[-1].isdigit(): | ||||||
|                 key = parts[0] |                 key = '.'.join(parts[0:-1]) | ||||||
|                 position = int(parts[-1]) |                 position = int(parts[-1]) | ||||||
|                 # $position expects an iterable. If pushing a single value, |                 # $position expects an iterable. If pushing a single value, | ||||||
|                 # wrap it in a list. |                 # wrap it in a list. | ||||||
| @@ -420,7 +414,6 @@ def _infer_geometry(value): | |||||||
|                                 'type and coordinates keys') |                                 'type and coordinates keys') | ||||||
|     elif isinstance(value, (list, set)): |     elif isinstance(value, (list, set)): | ||||||
|         # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon? |         # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon? | ||||||
|         # TODO: should both TypeError and IndexError be alike interpreted? |  | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             value[0][0][0] |             value[0][0][0] | ||||||
|   | |||||||
| @@ -3,7 +3,7 @@ import copy | |||||||
| from mongoengine.errors import InvalidQueryError | from mongoengine.errors import InvalidQueryError | ||||||
| from mongoengine.queryset import transform | from mongoengine.queryset import transform | ||||||
|  |  | ||||||
| __all__ = ('Q',) | __all__ = ('Q', 'QNode') | ||||||
|  |  | ||||||
|  |  | ||||||
| class QNodeVisitor(object): | class QNodeVisitor(object): | ||||||
| @@ -131,6 +131,10 @@ class QCombination(QNode): | |||||||
|             else: |             else: | ||||||
|                 self.children.append(node) |                 self.children.append(node) | ||||||
|  |  | ||||||
|  |     def __repr__(self): | ||||||
|  |         op = ' & ' if self.operation is self.AND else ' | ' | ||||||
|  |         return '(%s)' % op.join([repr(node) for node in self.children]) | ||||||
|  |  | ||||||
|     def accept(self, visitor): |     def accept(self, visitor): | ||||||
|         for i in range(len(self.children)): |         for i in range(len(self.children)): | ||||||
|             if isinstance(self.children[i], QNode): |             if isinstance(self.children[i], QNode): | ||||||
| @@ -151,6 +155,9 @@ class Q(QNode): | |||||||
|     def __init__(self, **query): |     def __init__(self, **query): | ||||||
|         self.query = query |         self.query = query | ||||||
|  |  | ||||||
|  |     def __repr__(self): | ||||||
|  |         return 'Q(**%s)' % repr(self.query) | ||||||
|  |  | ||||||
|     def accept(self, visitor): |     def accept(self, visitor): | ||||||
|         return visitor.visit_query(self) |         return visitor.visit_query(self) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,5 +1,5 @@ | |||||||
| nose | nose | ||||||
| pymongo>=2.7.1 | pymongo>=3.4 | ||||||
| six==1.10.0 | six==1.10.0 | ||||||
| flake8 | flake8 | ||||||
| flake8-import-order | flake8-import-order | ||||||
|   | |||||||
| @@ -5,7 +5,7 @@ detailed-errors=1 | |||||||
| cover-package=mongoengine | cover-package=mongoengine | ||||||
|  |  | ||||||
| [flake8] | [flake8] | ||||||
| ignore=E501,F401,F403,F405,I201,I202 | ignore=E501,F401,F403,F405,I201,I202,W504, W605 | ||||||
| exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests | exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests | ||||||
| max-complexity=47 | max-complexity=47 | ||||||
| application-import-names=mongoengine,tests | application-import-names=mongoengine,tests | ||||||
|   | |||||||
							
								
								
									
										5
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										5
									
								
								setup.py
									
									
									
									
									
								
							| @@ -44,9 +44,8 @@ CLASSIFIERS = [ | |||||||
|     "Programming Language :: Python :: 2", |     "Programming Language :: Python :: 2", | ||||||
|     "Programming Language :: Python :: 2.7", |     "Programming Language :: Python :: 2.7", | ||||||
|     "Programming Language :: Python :: 3", |     "Programming Language :: Python :: 3", | ||||||
|     "Programming Language :: Python :: 3.3", |  | ||||||
|     "Programming Language :: Python :: 3.4", |  | ||||||
|     "Programming Language :: Python :: 3.5", |     "Programming Language :: Python :: 3.5", | ||||||
|  |     "Programming Language :: Python :: 3.6", | ||||||
|     "Programming Language :: Python :: Implementation :: CPython", |     "Programming Language :: Python :: Implementation :: CPython", | ||||||
|     "Programming Language :: Python :: Implementation :: PyPy", |     "Programming Language :: Python :: Implementation :: PyPy", | ||||||
|     'Topic :: Database', |     'Topic :: Database', | ||||||
| @@ -81,7 +80,7 @@ setup( | |||||||
|     long_description=LONG_DESCRIPTION, |     long_description=LONG_DESCRIPTION, | ||||||
|     platforms=['any'], |     platforms=['any'], | ||||||
|     classifiers=CLASSIFIERS, |     classifiers=CLASSIFIERS, | ||||||
|     install_requires=['pymongo>=2.7.1', 'six'], |     install_requires=['pymongo>=3.4', 'six'], | ||||||
|     test_suite='nose.collector', |     test_suite='nose.collector', | ||||||
|     **extra_opts |     **extra_opts | ||||||
| ) | ) | ||||||
|   | |||||||
| @@ -1,4 +1,4 @@ | |||||||
| from all_warnings import AllWarnings | from .all_warnings import AllWarnings | ||||||
| from document import * | from .document import * | ||||||
| from queryset import * | from .queryset import * | ||||||
| from fields import * | from .fields import * | ||||||
|   | |||||||
| @@ -1,13 +1,13 @@ | |||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| from class_methods import * | from .class_methods import * | ||||||
| from delta import * | from .delta import * | ||||||
| from dynamic import * | from .dynamic import * | ||||||
| from indexes import * | from .indexes import * | ||||||
| from inheritance import * | from .inheritance import * | ||||||
| from instance import * | from .instance import * | ||||||
| from json_serialisation import * | from .json_serialisation import * | ||||||
| from validation import * | from .validation import * | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -2,10 +2,10 @@ | |||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  | from mongoengine.pymongo_support import list_collection_names | ||||||
|  |  | ||||||
| from mongoengine.queryset import NULLIFY, PULL | from mongoengine.queryset import NULLIFY, PULL | ||||||
| from mongoengine.connection import get_db | from mongoengine.connection import get_db | ||||||
| from tests.utils import needs_mongodb_v26 |  | ||||||
|  |  | ||||||
| __all__ = ("ClassMethodsTest", ) | __all__ = ("ClassMethodsTest", ) | ||||||
|  |  | ||||||
| @@ -27,9 +27,7 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|         self.Person = Person |         self.Person = Person | ||||||
|  |  | ||||||
|     def tearDown(self): |     def tearDown(self): | ||||||
|         for collection in self.db.collection_names(): |         for collection in list_collection_names(self.db): | ||||||
|             if 'system.' in collection: |  | ||||||
|                 continue |  | ||||||
|             self.db.drop_collection(collection) |             self.db.drop_collection(collection) | ||||||
|  |  | ||||||
|     def test_definition(self): |     def test_definition(self): | ||||||
| @@ -66,10 +64,10 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|         """ |         """ | ||||||
|         collection_name = 'person' |         collection_name = 'person' | ||||||
|         self.Person(name='Test').save() |         self.Person(name='Test').save() | ||||||
|         self.assertTrue(collection_name in self.db.collection_names()) |         self.assertIn(collection_name, list_collection_names(self.db)) | ||||||
|  |  | ||||||
|         self.Person.drop_collection() |         self.Person.drop_collection() | ||||||
|         self.assertFalse(collection_name in self.db.collection_names()) |         self.assertNotIn(collection_name, list_collection_names(self.db)) | ||||||
|  |  | ||||||
|     def test_register_delete_rule(self): |     def test_register_delete_rule(self): | ||||||
|         """Ensure that register delete rule adds a delete rule to the document |         """Ensure that register delete rule adds a delete rule to the document | ||||||
| @@ -102,16 +100,16 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|         BlogPost.ensure_indexes() |         BlogPost.ensure_indexes() | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) |         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) | ||||||
|  |  | ||||||
|         BlogPost.ensure_index(['author', 'description']) |         BlogPost.ensure_index(['author', 'description']) | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('author', 1), ('description', 1)]] }) |         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('author', 1), ('description', 1)]]}) | ||||||
|  |  | ||||||
|         BlogPost._get_collection().drop_index('author_1_description_1') |         BlogPost._get_collection().drop_index('author_1_description_1') | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) |         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) | ||||||
|  |  | ||||||
|         BlogPost._get_collection().drop_index('author_1_title_1') |         BlogPost._get_collection().drop_index('author_1_title_1') | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('author', 1), ('title', 1)]], 'extra': [] }) |         self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('author', 1), ('title', 1)]], 'extra': []}) | ||||||
|  |  | ||||||
|     def test_compare_indexes_inheritance(self): |     def test_compare_indexes_inheritance(self): | ||||||
|         """ Ensure that the indexes are properly created and that |         """ Ensure that the indexes are properly created and that | ||||||
| @@ -140,16 +138,16 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         BlogPost.ensure_indexes() |         BlogPost.ensure_indexes() | ||||||
|         BlogPostWithTags.ensure_indexes() |         BlogPostWithTags.ensure_indexes() | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) |         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) | ||||||
|  |  | ||||||
|         BlogPostWithTags.ensure_index(['author', 'tag_list']) |         BlogPostWithTags.ensure_index(['author', 'tag_list']) | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]] }) |         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]]}) | ||||||
|  |  | ||||||
|         BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1') |         BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1') | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) |         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) | ||||||
|  |  | ||||||
|         BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1') |         BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1') | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': [] }) |         self.assertEqual(BlogPost.compare_indexes(), {'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': []}) | ||||||
|  |  | ||||||
|     def test_compare_indexes_multiple_subclasses(self): |     def test_compare_indexes_multiple_subclasses(self): | ||||||
|         """ Ensure that compare_indexes behaves correctly if called from a |         """ Ensure that compare_indexes behaves correctly if called from a | ||||||
| @@ -184,11 +182,10 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|         BlogPostWithTags.ensure_indexes() |         BlogPostWithTags.ensure_indexes() | ||||||
|         BlogPostWithCustomField.ensure_indexes() |         BlogPostWithCustomField.ensure_indexes() | ||||||
|  |  | ||||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) |         self.assertEqual(BlogPost.compare_indexes(), {'missing': [], 'extra': []}) | ||||||
|         self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] }) |         self.assertEqual(BlogPostWithTags.compare_indexes(), {'missing': [], 'extra': []}) | ||||||
|         self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] }) |         self.assertEqual(BlogPostWithCustomField.compare_indexes(), {'missing': [], 'extra': []}) | ||||||
|  |  | ||||||
|     @needs_mongodb_v26 |  | ||||||
|     def test_compare_indexes_for_text_indexes(self): |     def test_compare_indexes_for_text_indexes(self): | ||||||
|         """ Ensure that compare_indexes behaves correctly for text indexes """ |         """ Ensure that compare_indexes behaves correctly for text indexes """ | ||||||
|  |  | ||||||
| @@ -340,7 +337,7 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|             meta = {'collection': collection_name} |             meta = {'collection': collection_name} | ||||||
|  |  | ||||||
|         Person(name="Test User").save() |         Person(name="Test User").save() | ||||||
|         self.assertTrue(collection_name in self.db.collection_names()) |         self.assertIn(collection_name, list_collection_names(self.db)) | ||||||
|  |  | ||||||
|         user_obj = self.db[collection_name].find_one() |         user_obj = self.db[collection_name].find_one() | ||||||
|         self.assertEqual(user_obj['name'], "Test User") |         self.assertEqual(user_obj['name'], "Test User") | ||||||
| @@ -349,7 +346,7 @@ class ClassMethodsTest(unittest.TestCase): | |||||||
|         self.assertEqual(user_obj.name, "Test User") |         self.assertEqual(user_obj.name, "Test User") | ||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|         self.assertFalse(collection_name in self.db.collection_names()) |         self.assertNotIn(collection_name, list_collection_names(self.db)) | ||||||
|  |  | ||||||
|     def test_collection_name_and_primary(self): |     def test_collection_name_and_primary(self): | ||||||
|         """Ensure that a collection with a specified name may be used. |         """Ensure that a collection with a specified name may be used. | ||||||
|   | |||||||
| @@ -3,16 +3,14 @@ import unittest | |||||||
|  |  | ||||||
| from bson import SON | from bson import SON | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.connection import get_db | from mongoengine.pymongo_support import list_collection_names | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
| __all__ = ("DeltaTest",) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeltaTest(unittest.TestCase): | class DeltaTest(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
|         connect(db='mongoenginetest') |         super(DeltaTest, self).setUp() | ||||||
|         self.db = get_db() |  | ||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
| @@ -25,9 +23,7 @@ class DeltaTest(unittest.TestCase): | |||||||
|         self.Person = Person |         self.Person = Person | ||||||
|  |  | ||||||
|     def tearDown(self): |     def tearDown(self): | ||||||
|         for collection in self.db.collection_names(): |         for collection in list_collection_names(self.db): | ||||||
|             if 'system.' in collection: |  | ||||||
|                 continue |  | ||||||
|             self.db.drop_collection(collection) |             self.db.drop_collection(collection) | ||||||
|  |  | ||||||
|     def test_delta(self): |     def test_delta(self): | ||||||
| @@ -694,7 +690,7 @@ class DeltaTest(unittest.TestCase): | |||||||
|         organization.employees.append(person) |         organization.employees.append(person) | ||||||
|         updates, removals = organization._delta() |         updates, removals = organization._delta() | ||||||
|         self.assertEqual({}, removals) |         self.assertEqual({}, removals) | ||||||
|         self.assertTrue('employees' in updates) |         self.assertIn('employees', updates) | ||||||
|  |  | ||||||
|     def test_delta_with_dbref_false(self): |     def test_delta_with_dbref_false(self): | ||||||
|         person, organization, employee = self.circular_reference_deltas_2(Document, Document, False) |         person, organization, employee = self.circular_reference_deltas_2(Document, Document, False) | ||||||
| @@ -709,7 +705,7 @@ class DeltaTest(unittest.TestCase): | |||||||
|         organization.employees.append(person) |         organization.employees.append(person) | ||||||
|         updates, removals = organization._delta() |         updates, removals = organization._delta() | ||||||
|         self.assertEqual({}, removals) |         self.assertEqual({}, removals) | ||||||
|         self.assertTrue('employees' in updates) |         self.assertIn('employees', updates) | ||||||
|  |  | ||||||
|     def test_nested_nested_fields_mark_as_changed(self): |     def test_nested_nested_fields_mark_as_changed(self): | ||||||
|         class EmbeddedDoc(EmbeddedDocument): |         class EmbeddedDoc(EmbeddedDocument): | ||||||
| @@ -863,5 +859,6 @@ class DeltaTest(unittest.TestCase): | |||||||
|         self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"]) |         self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"]) | ||||||
|         self.assertEqual(uinfo.id, delta[0]["users.007.info"]) |         self.assertEqual(uinfo.id, delta[0]["users.007.info"]) | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -1,16 +1,15 @@ | |||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.connection import get_db | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
| __all__ = ("DynamicTest", ) | __all__ = ("TestDynamicDocument", ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class DynamicTest(unittest.TestCase): | class TestDynamicDocument(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
|         connect(db='mongoenginetest') |         super(TestDynamicDocument, self).setUp() | ||||||
|         self.db = get_db() |  | ||||||
|  |  | ||||||
|         class Person(DynamicDocument): |         class Person(DynamicDocument): | ||||||
|             name = StringField() |             name = StringField() | ||||||
| @@ -98,6 +97,72 @@ class DynamicTest(unittest.TestCase): | |||||||
|         self.assertEqual(len(p._data), 4) |         self.assertEqual(len(p._data), 4) | ||||||
|         self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name']) |         self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name']) | ||||||
|  |  | ||||||
|  |     def test_fields_without_underscore(self): | ||||||
|  |         """Ensure we can query dynamic fields""" | ||||||
|  |         Person = self.Person | ||||||
|  |  | ||||||
|  |         p = self.Person(name='Dean') | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         raw_p = Person.objects.as_pymongo().get(id=p.id) | ||||||
|  |         self.assertEqual( | ||||||
|  |             raw_p, | ||||||
|  |             { | ||||||
|  |                 '_cls': u'Person', | ||||||
|  |                 '_id': p.id, | ||||||
|  |                 'name': u'Dean' | ||||||
|  |              } | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         p.name = 'OldDean' | ||||||
|  |         p.newattr = 'garbage' | ||||||
|  |         p.save() | ||||||
|  |         raw_p = Person.objects.as_pymongo().get(id=p.id) | ||||||
|  |         self.assertEqual( | ||||||
|  |             raw_p, | ||||||
|  |             { | ||||||
|  |                 '_cls': u'Person', | ||||||
|  |                 '_id': p.id, | ||||||
|  |                 'name': 'OldDean', | ||||||
|  |                 'newattr': u'garbage' | ||||||
|  |             } | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def test_fields_containing_underscore(self): | ||||||
|  |         """Ensure we can query dynamic fields""" | ||||||
|  |         class WeirdPerson(DynamicDocument): | ||||||
|  |             name = StringField() | ||||||
|  |             _name = StringField() | ||||||
|  |  | ||||||
|  |         WeirdPerson.drop_collection() | ||||||
|  |  | ||||||
|  |         p = WeirdPerson(name='Dean', _name='Dean') | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) | ||||||
|  |         self.assertEqual( | ||||||
|  |             raw_p, | ||||||
|  |             { | ||||||
|  |                 '_id': p.id, | ||||||
|  |                 '_name': u'Dean', | ||||||
|  |                 'name': u'Dean' | ||||||
|  |             } | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         p.name = 'OldDean' | ||||||
|  |         p._name = 'NewDean' | ||||||
|  |         p._newattr1 = 'garbage'    # Unknown fields won't be added | ||||||
|  |         p.save() | ||||||
|  |         raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) | ||||||
|  |         self.assertEqual( | ||||||
|  |             raw_p, | ||||||
|  |             { | ||||||
|  |                 '_id': p.id, | ||||||
|  |                 '_name': u'NewDean', | ||||||
|  |                 'name': u'OldDean', | ||||||
|  |             } | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     def test_dynamic_document_queries(self): |     def test_dynamic_document_queries(self): | ||||||
|         """Ensure we can query dynamic fields""" |         """Ensure we can query dynamic fields""" | ||||||
|         p = self.Person() |         p = self.Person() | ||||||
| @@ -174,8 +239,8 @@ class DynamicTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Employee.drop_collection() |         Employee.drop_collection() | ||||||
|  |  | ||||||
|         self.assertTrue('name' in Employee._fields) |         self.assertIn('name', Employee._fields) | ||||||
|         self.assertTrue('salary' in Employee._fields) |         self.assertIn('salary', Employee._fields) | ||||||
|         self.assertEqual(Employee._get_collection_name(), |         self.assertEqual(Employee._get_collection_name(), | ||||||
|                          self.Person._get_collection_name()) |                          self.Person._get_collection_name()) | ||||||
|  |  | ||||||
| @@ -189,7 +254,7 @@ class DynamicTest(unittest.TestCase): | |||||||
|         self.assertEqual(1, Employee.objects(age=20).count()) |         self.assertEqual(1, Employee.objects(age=20).count()) | ||||||
|  |  | ||||||
|         joe_bloggs = self.Person.objects.first() |         joe_bloggs = self.Person.objects.first() | ||||||
|         self.assertTrue(isinstance(joe_bloggs, Employee)) |         self.assertIsInstance(joe_bloggs, Employee) | ||||||
|  |  | ||||||
|     def test_embedded_dynamic_document(self): |     def test_embedded_dynamic_document(self): | ||||||
|         """Test dynamic embedded documents""" |         """Test dynamic embedded documents""" | ||||||
| @@ -369,5 +434,6 @@ class DynamicTest(unittest.TestCase): | |||||||
|         person.save() |         person.save() | ||||||
|         self.assertEqual(Person.objects.first().age, 35) |         self.assertEqual(Person.objects.first().age, 35) | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -1,16 +1,15 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| import unittest | import unittest | ||||||
| import sys | from datetime import datetime | ||||||
|  |  | ||||||
| from nose.plugins.skip import SkipTest | from nose.plugins.skip import SkipTest | ||||||
| from datetime import datetime | from pymongo.errors import OperationFailure | ||||||
| import pymongo | import pymongo | ||||||
|  | from six import iteritems | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.connection import get_db | from mongoengine.connection import get_db | ||||||
|  |  | ||||||
| from tests.utils import get_mongodb_version, needs_mongodb_v26 |  | ||||||
|  |  | ||||||
| __all__ = ("IndexesTest", ) | __all__ = ("IndexesTest", ) | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -68,9 +67,9 @@ class IndexesTest(unittest.TestCase): | |||||||
|         info = BlogPost.objects._collection.index_information() |         info = BlogPost.objects._collection.index_information() | ||||||
|         # _id, '-date', 'tags', ('cat', 'date') |         # _id, '-date', 'tags', ('cat', 'date') | ||||||
|         self.assertEqual(len(info), 4) |         self.assertEqual(len(info), 4) | ||||||
|         info = [value['key'] for key, value in info.iteritems()] |         info = [value['key'] for key, value in iteritems(info)] | ||||||
|         for expected in expected_specs: |         for expected in expected_specs: | ||||||
|             self.assertTrue(expected['fields'] in info) |             self.assertIn(expected['fields'], info) | ||||||
|  |  | ||||||
|     def _index_test_inheritance(self, InheritFrom): |     def _index_test_inheritance(self, InheritFrom): | ||||||
|  |  | ||||||
| @@ -100,9 +99,9 @@ class IndexesTest(unittest.TestCase): | |||||||
|         # the indices on -date and tags will both contain |         # the indices on -date and tags will both contain | ||||||
|         # _cls as first element in the key |         # _cls as first element in the key | ||||||
|         self.assertEqual(len(info), 4) |         self.assertEqual(len(info), 4) | ||||||
|         info = [value['key'] for key, value in info.iteritems()] |         info = [value['key'] for key, value in iteritems(info)] | ||||||
|         for expected in expected_specs: |         for expected in expected_specs: | ||||||
|             self.assertTrue(expected['fields'] in info) |             self.assertIn(expected['fields'], info) | ||||||
|  |  | ||||||
|         class ExtendedBlogPost(BlogPost): |         class ExtendedBlogPost(BlogPost): | ||||||
|             title = StringField() |             title = StringField() | ||||||
| @@ -115,9 +114,9 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         ExtendedBlogPost.ensure_indexes() |         ExtendedBlogPost.ensure_indexes() | ||||||
|         info = ExtendedBlogPost.objects._collection.index_information() |         info = ExtendedBlogPost.objects._collection.index_information() | ||||||
|         info = [value['key'] for key, value in info.iteritems()] |         info = [value['key'] for key, value in iteritems(info)] | ||||||
|         for expected in expected_specs: |         for expected in expected_specs: | ||||||
|             self.assertTrue(expected['fields'] in info) |             self.assertIn(expected['fields'], info) | ||||||
|  |  | ||||||
|     def test_indexes_document_inheritance(self): |     def test_indexes_document_inheritance(self): | ||||||
|         """Ensure that indexes are used when meta[indexes] is specified for |         """Ensure that indexes are used when meta[indexes] is specified for | ||||||
| @@ -225,8 +224,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|         # Indexes are lazy so use list() to perform query |         # Indexes are lazy so use list() to perform query | ||||||
|         list(Person.objects) |         list(Person.objects) | ||||||
|         info = Person.objects._collection.index_information() |         info = Person.objects._collection.index_information() | ||||||
|         info = [value['key'] for key, value in info.iteritems()] |         info = [value['key'] for key, value in iteritems(info)] | ||||||
|         self.assertTrue([('rank.title', 1)] in info) |         self.assertIn([('rank.title', 1)], info) | ||||||
|  |  | ||||||
|     def test_explicit_geo2d_index(self): |     def test_explicit_geo2d_index(self): | ||||||
|         """Ensure that geo2d indexes work when created via meta[indexes] |         """Ensure that geo2d indexes work when created via meta[indexes] | ||||||
| @@ -245,8 +244,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Place.ensure_indexes() |         Place.ensure_indexes() | ||||||
|         info = Place._get_collection().index_information() |         info = Place._get_collection().index_information() | ||||||
|         info = [value['key'] for key, value in info.iteritems()] |         info = [value['key'] for key, value in iteritems(info)] | ||||||
|         self.assertTrue([('location.point', '2d')] in info) |         self.assertIn([('location.point', '2d')], info) | ||||||
|  |  | ||||||
|     def test_explicit_geo2d_index_embedded(self): |     def test_explicit_geo2d_index_embedded(self): | ||||||
|         """Ensure that geo2d indexes work when created via meta[indexes] |         """Ensure that geo2d indexes work when created via meta[indexes] | ||||||
| @@ -268,8 +267,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Place.ensure_indexes() |         Place.ensure_indexes() | ||||||
|         info = Place._get_collection().index_information() |         info = Place._get_collection().index_information() | ||||||
|         info = [value['key'] for key, value in info.iteritems()] |         info = [value['key'] for key, value in iteritems(info)] | ||||||
|         self.assertTrue([('current.location.point', '2d')] in info) |         self.assertIn([('current.location.point', '2d')], info) | ||||||
|  |  | ||||||
|     def test_explicit_geosphere_index(self): |     def test_explicit_geosphere_index(self): | ||||||
|         """Ensure that geosphere indexes work when created via meta[indexes] |         """Ensure that geosphere indexes work when created via meta[indexes] | ||||||
| @@ -288,8 +287,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Place.ensure_indexes() |         Place.ensure_indexes() | ||||||
|         info = Place._get_collection().index_information() |         info = Place._get_collection().index_information() | ||||||
|         info = [value['key'] for key, value in info.iteritems()] |         info = [value['key'] for key, value in iteritems(info)] | ||||||
|         self.assertTrue([('location.point', '2dsphere')] in info) |         self.assertIn([('location.point', '2dsphere')], info) | ||||||
|  |  | ||||||
|     def test_explicit_geohaystack_index(self): |     def test_explicit_geohaystack_index(self): | ||||||
|         """Ensure that geohaystack indexes work when created via meta[indexes] |         """Ensure that geohaystack indexes work when created via meta[indexes] | ||||||
| @@ -310,8 +309,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Place.ensure_indexes() |         Place.ensure_indexes() | ||||||
|         info = Place._get_collection().index_information() |         info = Place._get_collection().index_information() | ||||||
|         info = [value['key'] for key, value in info.iteritems()] |         info = [value['key'] for key, value in iteritems(info)] | ||||||
|         self.assertTrue([('location.point', 'geoHaystack')] in info) |         self.assertIn([('location.point', 'geoHaystack')], info) | ||||||
|  |  | ||||||
|     def test_create_geohaystack_index(self): |     def test_create_geohaystack_index(self): | ||||||
|         """Ensure that geohaystack indexes can be created |         """Ensure that geohaystack indexes can be created | ||||||
| @@ -322,8 +321,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10) |         Place.create_index({'fields': (')location.point', 'name')}, bucketSize=10) | ||||||
|         info = Place._get_collection().index_information() |         info = Place._get_collection().index_information() | ||||||
|         info = [value['key'] for key, value in info.iteritems()] |         info = [value['key'] for key, value in iteritems(info)] | ||||||
|         self.assertTrue([('location.point', 'geoHaystack'), ('name', 1)] in info) |         self.assertIn([('location.point', 'geoHaystack'), ('name', 1)], info) | ||||||
|  |  | ||||||
|     def test_dictionary_indexes(self): |     def test_dictionary_indexes(self): | ||||||
|         """Ensure that indexes are used when meta[indexes] contains |         """Ensure that indexes are used when meta[indexes] contains | ||||||
| @@ -355,8 +354,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|         info = [(value['key'], |         info = [(value['key'], | ||||||
|                  value.get('unique', False), |                  value.get('unique', False), | ||||||
|                  value.get('sparse', False)) |                  value.get('sparse', False)) | ||||||
|                 for key, value in info.iteritems()] |                 for key, value in iteritems(info)] | ||||||
|         self.assertTrue(([('addDate', -1)], True, True) in info) |         self.assertIn(([('addDate', -1)], True, True), info) | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
| @@ -407,7 +406,7 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         self.assertEqual(2, User.objects.count()) |         self.assertEqual(2, User.objects.count()) | ||||||
|         info = User.objects._collection.index_information() |         info = User.objects._collection.index_information() | ||||||
|         self.assertEqual(info.keys(), ['_id_']) |         self.assertEqual(list(info.keys()), ['_id_']) | ||||||
|  |  | ||||||
|         User.ensure_indexes() |         User.ensure_indexes() | ||||||
|         info = User.objects._collection.index_information() |         info = User.objects._collection.index_information() | ||||||
| @@ -476,7 +475,6 @@ class IndexesTest(unittest.TestCase): | |||||||
|     def test_covered_index(self): |     def test_covered_index(self): | ||||||
|         """Ensure that covered indexes can be used |         """Ensure that covered indexes can be used | ||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         class Test(Document): |         class Test(Document): | ||||||
|             a = IntField() |             a = IntField() | ||||||
|             b = IntField() |             b = IntField() | ||||||
| @@ -491,38 +489,41 @@ class IndexesTest(unittest.TestCase): | |||||||
|         obj = Test(a=1) |         obj = Test(a=1) | ||||||
|         obj.save() |         obj.save() | ||||||
|  |  | ||||||
|         IS_MONGODB_3 = get_mongodb_version()[0] >= 3 |  | ||||||
|  |  | ||||||
|         # Need to be explicit about covered indexes as mongoDB doesn't know if |         # Need to be explicit about covered indexes as mongoDB doesn't know if | ||||||
|         # the documents returned might have more keys in that here. |         # the documents returned might have more keys in that here. | ||||||
|         query_plan = Test.objects(id=obj.id).exclude('a').explain() |         query_plan = Test.objects(id=obj.id).exclude('a').explain() | ||||||
|         if not IS_MONGODB_3: |         self.assertEqual( | ||||||
|             self.assertFalse(query_plan['indexOnly']) |             query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), | ||||||
|         else: |             'IDHACK' | ||||||
|             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IDHACK') |         ) | ||||||
|  |  | ||||||
|         query_plan = Test.objects(id=obj.id).only('id').explain() |         query_plan = Test.objects(id=obj.id).only('id').explain() | ||||||
|         if not IS_MONGODB_3: |         self.assertEqual( | ||||||
|             self.assertTrue(query_plan['indexOnly']) |             query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), | ||||||
|         else: |             'IDHACK' | ||||||
|             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IDHACK') |         ) | ||||||
|  |  | ||||||
|         query_plan = Test.objects(a=1).only('a').exclude('id').explain() |         query_plan = Test.objects(a=1).only('a').exclude('id').explain() | ||||||
|         if not IS_MONGODB_3: |         self.assertEqual( | ||||||
|             self.assertTrue(query_plan['indexOnly']) |             query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), | ||||||
|         else: |             'IXSCAN' | ||||||
|             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IXSCAN') |         ) | ||||||
|             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('stage'), 'PROJECTION') |         self.assertEqual( | ||||||
|  |             query_plan.get('queryPlanner').get('winningPlan').get('stage'), | ||||||
|  |             'PROJECTION' | ||||||
|  |         ) | ||||||
|  |  | ||||||
|         query_plan = Test.objects(a=1).explain() |         query_plan = Test.objects(a=1).explain() | ||||||
|         if not IS_MONGODB_3: |         self.assertEqual( | ||||||
|             self.assertFalse(query_plan['indexOnly']) |             query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), | ||||||
|         else: |             'IXSCAN' | ||||||
|             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('inputStage').get('stage'), 'IXSCAN') |         ) | ||||||
|             self.assertEqual(query_plan.get('queryPlanner').get('winningPlan').get('stage'), 'FETCH') |         self.assertEqual( | ||||||
|  |             query_plan.get('queryPlanner').get('winningPlan').get('stage'), | ||||||
|  |             'FETCH' | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     def test_index_on_id(self): |     def test_index_on_id(self): | ||||||
|  |  | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
|             meta = { |             meta = { | ||||||
|                 'indexes': [ |                 'indexes': [ | ||||||
| @@ -541,40 +542,46 @@ class IndexesTest(unittest.TestCase): | |||||||
|                                  [('categories', 1), ('_id', 1)]) |                                  [('categories', 1), ('_id', 1)]) | ||||||
|  |  | ||||||
|     def test_hint(self): |     def test_hint(self): | ||||||
|  |         TAGS_INDEX_NAME = 'tags_1' | ||||||
|  |  | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
|             tags = ListField(StringField()) |             tags = ListField(StringField()) | ||||||
|             meta = { |             meta = { | ||||||
|                 'indexes': [ |                 'indexes': [ | ||||||
|                     'tags', |                     { | ||||||
|  |                         'fields': ['tags'], | ||||||
|  |                         'name': TAGS_INDEX_NAME | ||||||
|  |                     } | ||||||
|                 ], |                 ], | ||||||
|             } |             } | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|         for i in range(0, 10): |         for i in range(10): | ||||||
|             tags = [("tag %i" % n) for n in range(0, i % 2)] |             tags = [("tag %i" % n) for n in range(i % 2)] | ||||||
|             BlogPost(tags=tags).save() |             BlogPost(tags=tags).save() | ||||||
|  |  | ||||||
|         self.assertEqual(BlogPost.objects.count(), 10) |         # Hinting by shape should work. | ||||||
|  |         self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10) | ||||||
|  |  | ||||||
|  |         # Hinting by index name should work. | ||||||
|  |         self.assertEqual(BlogPost.objects.hint(TAGS_INDEX_NAME).count(), 10) | ||||||
|  |  | ||||||
|  |         # Clearing the hint should work fine. | ||||||
|         self.assertEqual(BlogPost.objects.hint().count(), 10) |         self.assertEqual(BlogPost.objects.hint().count(), 10) | ||||||
|  |         self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).hint().count(), 10) | ||||||
|  |  | ||||||
|         # PyMongo 3.0 bug only, works correctly with 2.X and 3.0.1+ versions |         # Hinting on a non-existent index shape should fail. | ||||||
|         if pymongo.version != '3.0': |         with self.assertRaises(OperationFailure): | ||||||
|             self.assertEqual(BlogPost.objects.hint([('tags', 1)]).count(), 10) |             BlogPost.objects.hint([('ZZ', 1)]).count() | ||||||
|  |  | ||||||
|             self.assertEqual(BlogPost.objects.hint([('ZZ', 1)]).count(), 10) |         # Hinting on a non-existent index name should fail. | ||||||
|  |         with self.assertRaises(OperationFailure): | ||||||
|  |             BlogPost.objects.hint('Bad Name').count() | ||||||
|  |  | ||||||
|         if pymongo.version >= '2.8': |         # Invalid shape argument (missing list brackets) should fail. | ||||||
|             self.assertEqual(BlogPost.objects.hint('tags').count(), 10) |         with self.assertRaises(ValueError): | ||||||
|         else: |             BlogPost.objects.hint(('tags', 1)).count() | ||||||
|             def invalid_index(): |  | ||||||
|                 BlogPost.objects.hint('tags').next() |  | ||||||
|             self.assertRaises(TypeError, invalid_index) |  | ||||||
|  |  | ||||||
|         def invalid_index_2(): |  | ||||||
|             return BlogPost.objects.hint(('tags', 1)).next() |  | ||||||
|         self.assertRaises(Exception, invalid_index_2) |  | ||||||
|  |  | ||||||
|     def test_unique(self): |     def test_unique(self): | ||||||
|         """Ensure that uniqueness constraints are applied to fields. |         """Ensure that uniqueness constraints are applied to fields. | ||||||
| @@ -591,10 +598,32 @@ class IndexesTest(unittest.TestCase): | |||||||
|         # Two posts with the same slug is not allowed |         # Two posts with the same slug is not allowed | ||||||
|         post2 = BlogPost(title='test2', slug='test') |         post2 = BlogPost(title='test2', slug='test') | ||||||
|         self.assertRaises(NotUniqueError, post2.save) |         self.assertRaises(NotUniqueError, post2.save) | ||||||
|  |         self.assertRaises(NotUniqueError, BlogPost.objects.insert, post2) | ||||||
|  |  | ||||||
|         # Ensure backwards compatibilty for errors |         # Ensure backwards compatibility for errors | ||||||
|         self.assertRaises(OperationError, post2.save) |         self.assertRaises(OperationError, post2.save) | ||||||
|  |  | ||||||
|  |     def test_primary_key_unique_not_working(self): | ||||||
|  |         """Relates to #1445""" | ||||||
|  |         class Blog(Document): | ||||||
|  |             id = StringField(primary_key=True, unique=True) | ||||||
|  |  | ||||||
|  |         Blog.drop_collection() | ||||||
|  |  | ||||||
|  |         with self.assertRaises(OperationFailure) as ctx_err: | ||||||
|  |             Blog(id='garbage').save() | ||||||
|  |  | ||||||
|  |         # One of the errors below should happen. Which one depends on the | ||||||
|  |         # PyMongo version and dict order. | ||||||
|  |         err_msg = str(ctx_err.exception) | ||||||
|  |         self.assertTrue( | ||||||
|  |             any([ | ||||||
|  |                 "The field 'unique' is not valid for an _id index specification" in err_msg, | ||||||
|  |                 "The field 'background' is not valid for an _id index specification" in err_msg, | ||||||
|  |                 "The field 'sparse' is not valid for an _id index specification" in err_msg, | ||||||
|  |             ]) | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     def test_unique_with(self): |     def test_unique_with(self): | ||||||
|         """Ensure that unique_with constraints are applied to fields. |         """Ensure that unique_with constraints are applied to fields. | ||||||
|         """ |         """ | ||||||
| @@ -676,6 +705,77 @@ class IndexesTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         self.assertRaises(NotUniqueError, post2.save) |         self.assertRaises(NotUniqueError, post2.save) | ||||||
|  |  | ||||||
|  |     def test_unique_embedded_document_in_sorted_list(self): | ||||||
|  |         """ | ||||||
|  |         Ensure that the uniqueness constraints are applied to fields in | ||||||
|  |         embedded documents, even when the embedded documents in a sorted list | ||||||
|  |         field. | ||||||
|  |         """ | ||||||
|  |         class SubDocument(EmbeddedDocument): | ||||||
|  |             year = IntField() | ||||||
|  |             slug = StringField(unique=True) | ||||||
|  |  | ||||||
|  |         class BlogPost(Document): | ||||||
|  |             title = StringField() | ||||||
|  |             subs = SortedListField(EmbeddedDocumentField(SubDocument), | ||||||
|  |                                    ordering='year') | ||||||
|  |  | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |         post1 = BlogPost( | ||||||
|  |             title='test1', subs=[ | ||||||
|  |                 SubDocument(year=2009, slug='conflict'), | ||||||
|  |                 SubDocument(year=2009, slug='conflict') | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |         post1.save() | ||||||
|  |  | ||||||
|  |         # confirm that the unique index is created | ||||||
|  |         indexes = BlogPost._get_collection().index_information() | ||||||
|  |         self.assertIn('subs.slug_1', indexes) | ||||||
|  |         self.assertTrue(indexes['subs.slug_1']['unique']) | ||||||
|  |  | ||||||
|  |         post2 = BlogPost( | ||||||
|  |             title='test2', subs=[SubDocument(year=2014, slug='conflict')] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         self.assertRaises(NotUniqueError, post2.save) | ||||||
|  |  | ||||||
|  |     def test_unique_embedded_document_in_embedded_document_list(self): | ||||||
|  |         """ | ||||||
|  |         Ensure that the uniqueness constraints are applied to fields in | ||||||
|  |         embedded documents, even when the embedded documents in an embedded | ||||||
|  |         list field. | ||||||
|  |         """ | ||||||
|  |         class SubDocument(EmbeddedDocument): | ||||||
|  |             year = IntField() | ||||||
|  |             slug = StringField(unique=True) | ||||||
|  |  | ||||||
|  |         class BlogPost(Document): | ||||||
|  |             title = StringField() | ||||||
|  |             subs = EmbeddedDocumentListField(SubDocument) | ||||||
|  |  | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |         post1 = BlogPost( | ||||||
|  |             title='test1', subs=[ | ||||||
|  |                 SubDocument(year=2009, slug='conflict'), | ||||||
|  |                 SubDocument(year=2009, slug='conflict') | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |         post1.save() | ||||||
|  |  | ||||||
|  |         # confirm that the unique index is created | ||||||
|  |         indexes = BlogPost._get_collection().index_information() | ||||||
|  |         self.assertIn('subs.slug_1', indexes) | ||||||
|  |         self.assertTrue(indexes['subs.slug_1']['unique']) | ||||||
|  |  | ||||||
|  |         post2 = BlogPost( | ||||||
|  |             title='test2', subs=[SubDocument(year=2014, slug='conflict')] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         self.assertRaises(NotUniqueError, post2.save) | ||||||
|  |  | ||||||
|     def test_unique_with_embedded_document_and_embedded_unique(self): |     def test_unique_with_embedded_document_and_embedded_unique(self): | ||||||
|         """Ensure that uniqueness constraints are applied to fields on |         """Ensure that uniqueness constraints are applied to fields on | ||||||
|         embedded documents.  And work with unique_with as well. |         embedded documents.  And work with unique_with as well. | ||||||
| @@ -727,6 +827,18 @@ class IndexesTest(unittest.TestCase): | |||||||
|         self.assertEqual(3600, |         self.assertEqual(3600, | ||||||
|                          info['created_1']['expireAfterSeconds']) |                          info['created_1']['expireAfterSeconds']) | ||||||
|  |  | ||||||
|  |     def test_index_drop_dups_silently_ignored(self): | ||||||
|  |         class Customer(Document): | ||||||
|  |             cust_id = IntField(unique=True, required=True) | ||||||
|  |             meta = { | ||||||
|  |                 'indexes': ['cust_id'], | ||||||
|  |                 'index_drop_dups': True, | ||||||
|  |                 'allow_inheritance': False, | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |         Customer.drop_collection() | ||||||
|  |         Customer.objects.first() | ||||||
|  |  | ||||||
|     def test_unique_and_indexes(self): |     def test_unique_and_indexes(self): | ||||||
|         """Ensure that 'unique' constraints aren't overridden by |         """Ensure that 'unique' constraints aren't overridden by | ||||||
|         meta.indexes. |         meta.indexes. | ||||||
| @@ -743,18 +855,23 @@ class IndexesTest(unittest.TestCase): | |||||||
|         cust.save() |         cust.save() | ||||||
|  |  | ||||||
|         cust_dupe = Customer(cust_id=1) |         cust_dupe = Customer(cust_id=1) | ||||||
|         try: |         with self.assertRaises(NotUniqueError): | ||||||
|             cust_dupe.save() |             cust_dupe.save() | ||||||
|             raise AssertionError("We saved a dupe!") |  | ||||||
|         except NotUniqueError: |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|     def test_unique_and_primary(self): |         cust = Customer(cust_id=2) | ||||||
|  |         cust.save() | ||||||
|  |  | ||||||
|  |         # duplicate key on update | ||||||
|  |         with self.assertRaises(NotUniqueError): | ||||||
|  |             cust.cust_id = 1 | ||||||
|  |             cust.save() | ||||||
|  |  | ||||||
|  |     def test_primary_save_duplicate_update_existing_object(self): | ||||||
|         """If you set a field as primary, then unexpected behaviour can occur. |         """If you set a field as primary, then unexpected behaviour can occur. | ||||||
|         You won't create a duplicate but you will update an existing document. |         You won't create a duplicate but you will update an existing document. | ||||||
|         """ |         """ | ||||||
|         class User(Document): |         class User(Document): | ||||||
|             name = StringField(primary_key=True, unique=True) |             name = StringField(primary_key=True) | ||||||
|             password = StringField() |             password = StringField() | ||||||
|  |  | ||||||
|         User.drop_collection() |         User.drop_collection() | ||||||
| @@ -801,9 +918,9 @@ class IndexesTest(unittest.TestCase): | |||||||
|             self.fail('Unbound local error at index + pk definition') |             self.fail('Unbound local error at index + pk definition') | ||||||
|  |  | ||||||
|         info = BlogPost.objects._collection.index_information() |         info = BlogPost.objects._collection.index_information() | ||||||
|         info = [value['key'] for key, value in info.iteritems()] |         info = [value['key'] for key, value in iteritems(info)] | ||||||
|         index_item = [('_id', 1), ('comments.comment_id', 1)] |         index_item = [('_id', 1), ('comments.comment_id', 1)] | ||||||
|         self.assertTrue(index_item in info) |         self.assertIn(index_item, info) | ||||||
|  |  | ||||||
|     def test_compound_key_embedded(self): |     def test_compound_key_embedded(self): | ||||||
|  |  | ||||||
| @@ -849,9 +966,9 @@ class IndexesTest(unittest.TestCase): | |||||||
|             } |             } | ||||||
|  |  | ||||||
|         info = MyDoc.objects._collection.index_information() |         info = MyDoc.objects._collection.index_information() | ||||||
|         info = [value['key'] for key, value in info.iteritems()] |         info = [value['key'] for key, value in iteritems(info)] | ||||||
|         self.assertTrue([('provider_ids.foo', 1)] in info) |         self.assertIn([('provider_ids.foo', 1)], info) | ||||||
|         self.assertTrue([('provider_ids.bar', 1)] in info) |         self.assertIn([('provider_ids.bar', 1)], info) | ||||||
|  |  | ||||||
|     def test_sparse_compound_indexes(self): |     def test_sparse_compound_indexes(self): | ||||||
|  |  | ||||||
| @@ -867,7 +984,6 @@ class IndexesTest(unittest.TestCase): | |||||||
|                          info['provider_ids.foo_1_provider_ids.bar_1']['key']) |                          info['provider_ids.foo_1_provider_ids.bar_1']['key']) | ||||||
|         self.assertTrue(info['provider_ids.foo_1_provider_ids.bar_1']['sparse']) |         self.assertTrue(info['provider_ids.foo_1_provider_ids.bar_1']['sparse']) | ||||||
|  |  | ||||||
|     @needs_mongodb_v26 |  | ||||||
|     def test_text_indexes(self): |     def test_text_indexes(self): | ||||||
|         class Book(Document): |         class Book(Document): | ||||||
|             title = DictField() |             title = DictField() | ||||||
| @@ -876,9 +992,9 @@ class IndexesTest(unittest.TestCase): | |||||||
|             } |             } | ||||||
|  |  | ||||||
|         indexes = Book.objects._collection.index_information() |         indexes = Book.objects._collection.index_information() | ||||||
|         self.assertTrue("title_text" in indexes) |         self.assertIn("title_text", indexes) | ||||||
|         key = indexes["title_text"]["key"] |         key = indexes["title_text"]["key"] | ||||||
|         self.assertTrue(('_fts', 'text') in key) |         self.assertIn(('_fts', 'text'), key) | ||||||
|  |  | ||||||
|     def test_hashed_indexes(self): |     def test_hashed_indexes(self): | ||||||
|  |  | ||||||
| @@ -889,8 +1005,8 @@ class IndexesTest(unittest.TestCase): | |||||||
|             } |             } | ||||||
|  |  | ||||||
|         indexes = Book.objects._collection.index_information() |         indexes = Book.objects._collection.index_information() | ||||||
|         self.assertTrue("ref_id_hashed" in indexes) |         self.assertIn("ref_id_hashed", indexes) | ||||||
|         self.assertTrue(('ref_id', 'hashed') in indexes["ref_id_hashed"]["key"]) |         self.assertIn(('ref_id', 'hashed'), indexes["ref_id_hashed"]["key"]) | ||||||
|  |  | ||||||
|     def test_indexes_after_database_drop(self): |     def test_indexes_after_database_drop(self): | ||||||
|         """ |         """ | ||||||
| @@ -931,7 +1047,6 @@ class IndexesTest(unittest.TestCase): | |||||||
|             # Drop the temporary database at the end |             # Drop the temporary database at the end | ||||||
|             connection.drop_database('tempdatabase') |             connection.drop_database('tempdatabase') | ||||||
|  |  | ||||||
|  |  | ||||||
|     def test_index_dont_send_cls_option(self): |     def test_index_dont_send_cls_option(self): | ||||||
|         """ |         """ | ||||||
|         Ensure that 'cls' option is not sent through ensureIndex. We shouldn't |         Ensure that 'cls' option is not sent through ensureIndex. We shouldn't | ||||||
| @@ -1013,7 +1128,7 @@ class IndexesTest(unittest.TestCase): | |||||||
|         TestDoc.ensure_indexes() |         TestDoc.ensure_indexes() | ||||||
|  |  | ||||||
|         index_info = TestDoc._get_collection().index_information() |         index_info = TestDoc._get_collection().index_information() | ||||||
|         self.assertTrue('shard_1_1__cls_1_txt_1_1' in index_info) |         self.assertIn('shard_1_1__cls_1_txt_1_1', index_info) | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|   | |||||||
| @@ -2,30 +2,45 @@ | |||||||
| import unittest | import unittest | ||||||
| import warnings | import warnings | ||||||
|  |  | ||||||
| from datetime import datetime | from six import iteritems | ||||||
|  |  | ||||||
|  | from mongoengine import (BooleanField, Document, EmbeddedDocument, | ||||||
|  |                          EmbeddedDocumentField, GenericReferenceField, | ||||||
|  |                          IntField, ReferenceField, StringField) | ||||||
|  | from mongoengine.pymongo_support import list_collection_names | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
| from tests.fixtures import Base | from tests.fixtures import Base | ||||||
|  |  | ||||||
| from mongoengine import Document, EmbeddedDocument, connect |  | ||||||
| from mongoengine.connection import get_db |  | ||||||
| from mongoengine.fields import (BooleanField, GenericReferenceField, |  | ||||||
|                                 IntField, StringField) |  | ||||||
|  |  | ||||||
| __all__ = ('InheritanceTest', ) | __all__ = ('InheritanceTest', ) | ||||||
|  |  | ||||||
|  |  | ||||||
| class InheritanceTest(unittest.TestCase): | class InheritanceTest(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         connect(db='mongoenginetest') |  | ||||||
|         self.db = get_db() |  | ||||||
|  |  | ||||||
|     def tearDown(self): |     def tearDown(self): | ||||||
|         for collection in self.db.collection_names(): |         for collection in list_collection_names(self.db): | ||||||
|             if 'system.' in collection: |  | ||||||
|                 continue |  | ||||||
|             self.db.drop_collection(collection) |             self.db.drop_collection(collection) | ||||||
|  |  | ||||||
|  |     def test_constructor_cls(self): | ||||||
|  |         # Ensures _cls is properly set during construction | ||||||
|  |         # and when object gets reloaded (prevent regression of #1950) | ||||||
|  |         class EmbedData(EmbeddedDocument): | ||||||
|  |             data = StringField() | ||||||
|  |             meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|  |         class DataDoc(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             embed = EmbeddedDocumentField(EmbedData) | ||||||
|  |             meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|  |         test_doc = DataDoc(name='test', embed=EmbedData(data='data')) | ||||||
|  |         self.assertEqual(test_doc._cls, 'DataDoc') | ||||||
|  |         self.assertEqual(test_doc.embed._cls, 'EmbedData') | ||||||
|  |         test_doc.save() | ||||||
|  |         saved_doc = DataDoc.objects.with_id(test_doc.id) | ||||||
|  |         self.assertEqual(test_doc._cls, saved_doc._cls) | ||||||
|  |         self.assertEqual(test_doc.embed._cls, saved_doc.embed._cls) | ||||||
|  |         test_doc.delete() | ||||||
|  |  | ||||||
|     def test_superclasses(self): |     def test_superclasses(self): | ||||||
|         """Ensure that the correct list of superclasses is assembled. |         """Ensure that the correct list of superclasses is assembled. | ||||||
|         """ |         """ | ||||||
| @@ -258,9 +273,10 @@ class InheritanceTest(unittest.TestCase): | |||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         # can't inherit because Animal didn't explicitly allow inheritance |         # can't inherit because Animal didn't explicitly allow inheritance | ||||||
|         with self.assertRaises(ValueError): |         with self.assertRaises(ValueError) as cm: | ||||||
|             class Dog(Animal): |             class Dog(Animal): | ||||||
|                 pass |                 pass | ||||||
|  |         self.assertIn("Document Animal may not be subclassed", str(cm.exception)) | ||||||
|  |  | ||||||
|         # Check that _cls etc aren't present on simple documents |         # Check that _cls etc aren't present on simple documents | ||||||
|         dog = Animal(name='dog').save() |         dog = Animal(name='dog').save() | ||||||
| @@ -268,7 +284,7 @@ class InheritanceTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         collection = self.db[Animal._get_collection_name()] |         collection = self.db[Animal._get_collection_name()] | ||||||
|         obj = collection.find_one() |         obj = collection.find_one() | ||||||
|         self.assertFalse('_cls' in obj) |         self.assertNotIn('_cls', obj) | ||||||
|  |  | ||||||
|     def test_cant_turn_off_inheritance_on_subclass(self): |     def test_cant_turn_off_inheritance_on_subclass(self): | ||||||
|         """Ensure if inheritance is on in a subclass you cant turn it off. |         """Ensure if inheritance is on in a subclass you cant turn it off. | ||||||
| @@ -277,9 +293,10 @@ class InheritanceTest(unittest.TestCase): | |||||||
|             name = StringField() |             name = StringField() | ||||||
|             meta = {'allow_inheritance': True} |             meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|         with self.assertRaises(ValueError): |         with self.assertRaises(ValueError) as cm: | ||||||
|             class Mammal(Animal): |             class Mammal(Animal): | ||||||
|                 meta = {'allow_inheritance': False} |                 meta = {'allow_inheritance': False} | ||||||
|  |         self.assertEqual(str(cm.exception), 'Only direct subclasses of Document may set "allow_inheritance" to False') | ||||||
|  |  | ||||||
|     def test_allow_inheritance_abstract_document(self): |     def test_allow_inheritance_abstract_document(self): | ||||||
|         """Ensure that abstract documents can set inheritance rules and that |         """Ensure that abstract documents can set inheritance rules and that | ||||||
| @@ -292,13 +309,48 @@ class InheritanceTest(unittest.TestCase): | |||||||
|         class Animal(FinalDocument): |         class Animal(FinalDocument): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         with self.assertRaises(ValueError): |         with self.assertRaises(ValueError) as cm: | ||||||
|             class Mammal(Animal): |             class Mammal(Animal): | ||||||
|                 pass |                 pass | ||||||
|  |  | ||||||
|         # Check that _cls isn't present in simple documents |         # Check that _cls isn't present in simple documents | ||||||
|         doc = Animal(name='dog') |         doc = Animal(name='dog') | ||||||
|         self.assertFalse('_cls' in doc.to_mongo()) |         self.assertNotIn('_cls', doc.to_mongo()) | ||||||
|  |  | ||||||
|  |     def test_using_abstract_class_in_reference_field(self): | ||||||
|  |         # Ensures no regression of #1920 | ||||||
|  |         class AbstractHuman(Document): | ||||||
|  |             meta = {'abstract': True} | ||||||
|  |  | ||||||
|  |         class Dad(AbstractHuman): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Home(Document): | ||||||
|  |             dad = ReferenceField(AbstractHuman)  # Referencing the abstract class | ||||||
|  |             address = StringField() | ||||||
|  |  | ||||||
|  |         dad = Dad(name='5').save() | ||||||
|  |         Home(dad=dad, address='street').save() | ||||||
|  |  | ||||||
|  |         home = Home.objects.first() | ||||||
|  |         home.address = 'garbage' | ||||||
|  |         home.save()     # Was failing with ValidationError | ||||||
|  |  | ||||||
|  |     def test_abstract_class_referencing_self(self): | ||||||
|  |         # Ensures no regression of #1920 | ||||||
|  |         class Human(Document): | ||||||
|  |             meta = {'abstract': True} | ||||||
|  |             creator = ReferenceField('self', dbref=True) | ||||||
|  |  | ||||||
|  |         class User(Human): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         user = User(name='John').save() | ||||||
|  |         user2 = User(name='Foo', creator=user).save() | ||||||
|  |  | ||||||
|  |         user2 = User.objects.with_id(user2.id) | ||||||
|  |         user2.name = 'Bar' | ||||||
|  |         user2.save()    # Was failing with ValidationError | ||||||
|  |  | ||||||
|     def test_abstract_handle_ids_in_metaclass_properly(self): |     def test_abstract_handle_ids_in_metaclass_properly(self): | ||||||
|  |  | ||||||
| @@ -358,11 +410,11 @@ class InheritanceTest(unittest.TestCase): | |||||||
|             meta = {'abstract': True, |             meta = {'abstract': True, | ||||||
|                     'allow_inheritance': False} |                     'allow_inheritance': False} | ||||||
|  |  | ||||||
|         bkk = City(continent='asia') |         city = City(continent='asia') | ||||||
|         self.assertEqual(None, bkk.pk) |         self.assertEqual(None, city.pk) | ||||||
|         # TODO: expected error? Shouldn't we create a new error type? |         # TODO: expected error? Shouldn't we create a new error type? | ||||||
|         with self.assertRaises(KeyError): |         with self.assertRaises(KeyError): | ||||||
|             setattr(bkk, 'pk', 1) |             setattr(city, 'pk', 1) | ||||||
|  |  | ||||||
|     def test_allow_inheritance_embedded_document(self): |     def test_allow_inheritance_embedded_document(self): | ||||||
|         """Ensure embedded documents respect inheritance.""" |         """Ensure embedded documents respect inheritance.""" | ||||||
| @@ -374,14 +426,14 @@ class InheritanceTest(unittest.TestCase): | |||||||
|                 pass |                 pass | ||||||
|  |  | ||||||
|         doc = Comment(content='test') |         doc = Comment(content='test') | ||||||
|         self.assertFalse('_cls' in doc.to_mongo()) |         self.assertNotIn('_cls', doc.to_mongo()) | ||||||
|  |  | ||||||
|         class Comment(EmbeddedDocument): |         class Comment(EmbeddedDocument): | ||||||
|             content = StringField() |             content = StringField() | ||||||
|             meta = {'allow_inheritance': True} |             meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|         doc = Comment(content='test') |         doc = Comment(content='test') | ||||||
|         self.assertTrue('_cls' in doc.to_mongo()) |         self.assertIn('_cls', doc.to_mongo()) | ||||||
|  |  | ||||||
|     def test_document_inheritance(self): |     def test_document_inheritance(self): | ||||||
|         """Ensure mutliple inheritance of abstract documents |         """Ensure mutliple inheritance of abstract documents | ||||||
| @@ -430,12 +482,12 @@ class InheritanceTest(unittest.TestCase): | |||||||
|             meta = {'abstract': True} |             meta = {'abstract': True} | ||||||
|         class Human(Mammal): pass |         class Human(Mammal): pass | ||||||
|  |  | ||||||
|         for k, v in defaults.iteritems(): |         for k, v in iteritems(defaults): | ||||||
|             for cls in [Animal, Fish, Guppy]: |             for cls in [Animal, Fish, Guppy]: | ||||||
|                 self.assertEqual(cls._meta[k], v) |                 self.assertEqual(cls._meta[k], v) | ||||||
|  |  | ||||||
|         self.assertFalse('collection' in Animal._meta) |         self.assertNotIn('collection', Animal._meta) | ||||||
|         self.assertFalse('collection' in Mammal._meta) |         self.assertNotIn('collection', Mammal._meta) | ||||||
|  |  | ||||||
|         self.assertEqual(Animal._get_collection_name(), None) |         self.assertEqual(Animal._get_collection_name(), None) | ||||||
|         self.assertEqual(Mammal._get_collection_name(), None) |         self.assertEqual(Mammal._get_collection_name(), None) | ||||||
|   | |||||||
| @@ -1,28 +1,30 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| import bson |  | ||||||
| import os | import os | ||||||
| import pickle | import pickle | ||||||
| import unittest | import unittest | ||||||
| import uuid | import uuid | ||||||
| import weakref | import weakref | ||||||
|  |  | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
|  |  | ||||||
|  | import bson | ||||||
| from bson import DBRef, ObjectId | from bson import DBRef, ObjectId | ||||||
| from tests import fixtures | from pymongo.errors import DuplicateKeyError | ||||||
| from tests.fixtures import (PickleEmbedded, PickleTest, PickleSignalsTest, | from six import iteritems | ||||||
|                             PickleDynamicEmbedded, PickleDynamicTest) |  | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.base import get_document, _document_registry |  | ||||||
| from mongoengine.connection import get_db |  | ||||||
| from mongoengine.errors import (NotRegistered, InvalidDocumentError, |  | ||||||
|                                 InvalidQueryError, NotUniqueError, |  | ||||||
|                                 FieldDoesNotExist, SaveConditionError) |  | ||||||
| from mongoengine.queryset import NULLIFY, Q |  | ||||||
| from mongoengine.context_managers import switch_db, query_counter |  | ||||||
| from mongoengine import signals | from mongoengine import signals | ||||||
|  | from mongoengine.base import _document_registry, get_document | ||||||
| from tests.utils import needs_mongodb_v26 | from mongoengine.connection import get_db | ||||||
|  | from mongoengine.context_managers import query_counter, switch_db | ||||||
|  | from mongoengine.errors import (FieldDoesNotExist, InvalidDocumentError, \ | ||||||
|  |                                 InvalidQueryError, NotRegistered, NotUniqueError, SaveConditionError) | ||||||
|  | from mongoengine.mongodb_support import MONGODB_34, MONGODB_36, get_mongodb_version | ||||||
|  | from mongoengine.pymongo_support import list_collection_names | ||||||
|  | from mongoengine.queryset import NULLIFY, Q | ||||||
|  | from tests import fixtures | ||||||
|  | from tests.fixtures import (PickleDynamicEmbedded, PickleDynamicTest, \ | ||||||
|  |                             PickleEmbedded, PickleSignalsTest, PickleTest) | ||||||
|  | from tests.utils import MongoDBTestCase, get_as_pymongo | ||||||
|  |  | ||||||
| TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), | TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), | ||||||
|                                '../fields/mongoengine.png') |                                '../fields/mongoengine.png') | ||||||
| @@ -30,12 +32,9 @@ TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), | |||||||
| __all__ = ("InstanceTest",) | __all__ = ("InstanceTest",) | ||||||
|  |  | ||||||
|  |  | ||||||
| class InstanceTest(unittest.TestCase): | class InstanceTest(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
|         connect(db='mongoenginetest') |  | ||||||
|         self.db = get_db() |  | ||||||
|  |  | ||||||
|         class Job(EmbeddedDocument): |         class Job(EmbeddedDocument): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             years = IntField() |             years = IntField() | ||||||
| @@ -53,9 +52,7 @@ class InstanceTest(unittest.TestCase): | |||||||
|         self.Job = Job |         self.Job = Job | ||||||
|  |  | ||||||
|     def tearDown(self): |     def tearDown(self): | ||||||
|         for collection in self.db.collection_names(): |         for collection in list_collection_names(self.db): | ||||||
|             if 'system.' in collection: |  | ||||||
|                 continue |  | ||||||
|             self.db.drop_collection(collection) |             self.db.drop_collection(collection) | ||||||
|  |  | ||||||
|     def assertDbEqual(self, docs): |     def assertDbEqual(self, docs): | ||||||
| @@ -336,41 +333,36 @@ class InstanceTest(unittest.TestCase): | |||||||
|         self.assertEqual(User._fields['username'].db_field, '_id') |         self.assertEqual(User._fields['username'].db_field, '_id') | ||||||
|         self.assertEqual(User._meta['id_field'], 'username') |         self.assertEqual(User._meta['id_field'], 'username') | ||||||
|  |  | ||||||
|         # test no primary key field |         User.objects.create(username='test', name='test user') | ||||||
|         self.assertRaises(ValidationError, User(name='test').save) |         user = User.objects.first() | ||||||
|  |         self.assertEqual(user.id, 'test') | ||||||
|  |         self.assertEqual(user.pk, 'test') | ||||||
|  |         user_dict = User.objects._collection.find_one() | ||||||
|  |         self.assertEqual(user_dict['_id'], 'test') | ||||||
|  |  | ||||||
|         # define a subclass with a different primary key field than the |     def test_change_custom_id_field_in_subclass(self): | ||||||
|         # parent |         """Subclasses cannot override which field is the primary key.""" | ||||||
|         with self.assertRaises(ValueError): |         class User(Document): | ||||||
|  |             username = StringField(primary_key=True) | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|  |         with self.assertRaises(ValueError) as e: | ||||||
|             class EmailUser(User): |             class EmailUser(User): | ||||||
|                 email = StringField(primary_key=True) |                 email = StringField(primary_key=True) | ||||||
|  |         exc = e.exception | ||||||
|  |         self.assertEqual(str(exc), 'Cannot override primary key field') | ||||||
|  |  | ||||||
|         class EmailUser(User): |     def test_custom_id_field_is_required(self): | ||||||
|             email = StringField() |         """Ensure the custom primary key field is required.""" | ||||||
|  |         class User(Document): | ||||||
|  |             username = StringField(primary_key=True) | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|         user = User(username='test', name='test user') |         with self.assertRaises(ValidationError) as e: | ||||||
|         user.save() |             User(name='test').save() | ||||||
|  |         exc = e.exception | ||||||
|         user_obj = User.objects.first() |         self.assertTrue("Field is required: ['username']" in str(exc)) | ||||||
|         self.assertEqual(user_obj.id, 'test') |  | ||||||
|         self.assertEqual(user_obj.pk, 'test') |  | ||||||
|  |  | ||||||
|         user_son = User.objects._collection.find_one() |  | ||||||
|         self.assertEqual(user_son['_id'], 'test') |  | ||||||
|         self.assertTrue('username' not in user_son['_id']) |  | ||||||
|  |  | ||||||
|         User.drop_collection() |  | ||||||
|  |  | ||||||
|         user = User(pk='mongo', name='mongo user') |  | ||||||
|         user.save() |  | ||||||
|  |  | ||||||
|         user_obj = User.objects.first() |  | ||||||
|         self.assertEqual(user_obj.id, 'mongo') |  | ||||||
|         self.assertEqual(user_obj.pk, 'mongo') |  | ||||||
|  |  | ||||||
|         user_son = User.objects._collection.find_one() |  | ||||||
|         self.assertEqual(user_son['_id'], 'mongo') |  | ||||||
|         self.assertTrue('username' not in user_son['_id']) |  | ||||||
|  |  | ||||||
|     def test_document_not_registered(self): |     def test_document_not_registered(self): | ||||||
|         class Place(Document): |         class Place(Document): | ||||||
| @@ -419,6 +411,12 @@ class InstanceTest(unittest.TestCase): | |||||||
|         person.save() |         person.save() | ||||||
|         person.to_dbref() |         person.to_dbref() | ||||||
|  |  | ||||||
|  |     def test_key_like_attribute_access(self): | ||||||
|  |         person = self.Person(age=30) | ||||||
|  |         self.assertEqual(person['age'], 30) | ||||||
|  |         with self.assertRaises(KeyError): | ||||||
|  |             person['unknown_attr'] | ||||||
|  |  | ||||||
|     def test_save_abstract_document(self): |     def test_save_abstract_document(self): | ||||||
|         """Saving an abstract document should fail.""" |         """Saving an abstract document should fail.""" | ||||||
|         class Doc(Document): |         class Doc(Document): | ||||||
| @@ -461,7 +459,16 @@ class InstanceTest(unittest.TestCase): | |||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         doc = Animal(superphylum='Deuterostomia') |         doc = Animal(superphylum='Deuterostomia') | ||||||
|         doc.save() |         doc.save() | ||||||
|         doc.reload() |  | ||||||
|  |         mongo_db = get_mongodb_version() | ||||||
|  |         CMD_QUERY_KEY = 'command' if mongo_db >= MONGODB_36 else 'query' | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             doc.reload() | ||||||
|  |             query_op = q.db.system.profile.find({'ns': 'mongoenginetest.animal'})[0] | ||||||
|  |             self.assertEqual(set(query_op[CMD_QUERY_KEY]['filter'].keys()), set(['_id', 'superphylum'])) | ||||||
|  |  | ||||||
|  |         Animal.drop_collection() | ||||||
|  |  | ||||||
|     def test_reload_sharded_nested(self): |     def test_reload_sharded_nested(self): | ||||||
|         class SuperPhylum(EmbeddedDocument): |         class SuperPhylum(EmbeddedDocument): | ||||||
| @@ -475,6 +482,34 @@ class InstanceTest(unittest.TestCase): | |||||||
|         doc = Animal(superphylum=SuperPhylum(name='Deuterostomia')) |         doc = Animal(superphylum=SuperPhylum(name='Deuterostomia')) | ||||||
|         doc.save() |         doc.save() | ||||||
|         doc.reload() |         doc.reload() | ||||||
|  |         Animal.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_update_shard_key_routing(self): | ||||||
|  |         """Ensures updating a doc with a specified shard_key includes it in | ||||||
|  |         the query. | ||||||
|  |         """ | ||||||
|  |         class Animal(Document): | ||||||
|  |             is_mammal = BooleanField() | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {'shard_key': ('is_mammal', 'id')} | ||||||
|  |  | ||||||
|  |         Animal.drop_collection() | ||||||
|  |         doc = Animal(is_mammal=True, name='Dog') | ||||||
|  |         doc.save() | ||||||
|  |  | ||||||
|  |         mongo_db = get_mongodb_version() | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             doc.name = 'Cat' | ||||||
|  |             doc.save() | ||||||
|  |             query_op = q.db.system.profile.find({'ns': 'mongoenginetest.animal'})[0] | ||||||
|  |             self.assertEqual(query_op['op'], 'update') | ||||||
|  |             if mongo_db <= MONGODB_34: | ||||||
|  |                 self.assertEqual(set(query_op['query'].keys()), set(['_id', 'is_mammal'])) | ||||||
|  |             else: | ||||||
|  |                 self.assertEqual(set(query_op['command']['q'].keys()), set(['_id', 'is_mammal'])) | ||||||
|  |  | ||||||
|  |         Animal.drop_collection() | ||||||
|  |  | ||||||
|     def test_reload_with_changed_fields(self): |     def test_reload_with_changed_fields(self): | ||||||
|         """Ensures reloading will not affect changed fields""" |         """Ensures reloading will not affect changed fields""" | ||||||
| @@ -550,21 +585,14 @@ class InstanceTest(unittest.TestCase): | |||||||
|             pass |             pass | ||||||
|  |  | ||||||
|         f = Foo() |         f = Foo() | ||||||
|         try: |         with self.assertRaises(Foo.DoesNotExist): | ||||||
|             f.reload() |             f.reload() | ||||||
|         except Foo.DoesNotExist: |  | ||||||
|             pass |  | ||||||
|         except Exception: |  | ||||||
|             self.assertFalse("Threw wrong exception") |  | ||||||
|  |  | ||||||
|         f.save() |         f.save() | ||||||
|         f.delete() |         f.delete() | ||||||
|         try: |  | ||||||
|  |         with self.assertRaises(Foo.DoesNotExist): | ||||||
|             f.reload() |             f.reload() | ||||||
|         except Foo.DoesNotExist: |  | ||||||
|             pass |  | ||||||
|         except Exception: |  | ||||||
|             self.assertFalse("Threw wrong exception") |  | ||||||
|  |  | ||||||
|     def test_reload_of_non_strict_with_special_field_name(self): |     def test_reload_of_non_strict_with_special_field_name(self): | ||||||
|         """Ensures reloading works for documents with meta strict == False.""" |         """Ensures reloading works for documents with meta strict == False.""" | ||||||
| @@ -577,7 +605,7 @@ class InstanceTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Post.drop_collection() |         Post.drop_collection() | ||||||
|  |  | ||||||
|         Post._get_collection().insert({ |         Post._get_collection().insert_one({ | ||||||
|             "title": "Items eclipse", |             "title": "Items eclipse", | ||||||
|             "items": ["more lorem", "even more ipsum"] |             "items": ["more lorem", "even more ipsum"] | ||||||
|         }) |         }) | ||||||
| @@ -601,10 +629,10 @@ class InstanceTest(unittest.TestCase): | |||||||
|         # Length = length(assigned fields + id) |         # Length = length(assigned fields + id) | ||||||
|         self.assertEqual(len(person), 5) |         self.assertEqual(len(person), 5) | ||||||
|  |  | ||||||
|         self.assertTrue('age' in person) |         self.assertIn('age', person) | ||||||
|         person.age = None |         person.age = None | ||||||
|         self.assertFalse('age' in person) |         self.assertNotIn('age', person) | ||||||
|         self.assertFalse('nationality' in person) |         self.assertNotIn('nationality', person) | ||||||
|  |  | ||||||
|     def test_embedded_document_to_mongo(self): |     def test_embedded_document_to_mongo(self): | ||||||
|         class Person(EmbeddedDocument): |         class Person(EmbeddedDocument): | ||||||
| @@ -634,8 +662,8 @@ class InstanceTest(unittest.TestCase): | |||||||
|         class Comment(EmbeddedDocument): |         class Comment(EmbeddedDocument): | ||||||
|             content = StringField() |             content = StringField() | ||||||
|  |  | ||||||
|         self.assertTrue('content' in Comment._fields) |         self.assertIn('content', Comment._fields) | ||||||
|         self.assertFalse('id' in Comment._fields) |         self.assertNotIn('id', Comment._fields) | ||||||
|  |  | ||||||
|     def test_embedded_document_instance(self): |     def test_embedded_document_instance(self): | ||||||
|         """Ensure that embedded documents can reference parent instance.""" |         """Ensure that embedded documents can reference parent instance.""" | ||||||
| @@ -717,39 +745,78 @@ class InstanceTest(unittest.TestCase): | |||||||
|         acc1 = Account.objects.first() |         acc1 = Account.objects.first() | ||||||
|         self.assertHasInstance(acc1._data["emails"][0], acc1) |         self.assertHasInstance(acc1._data["emails"][0], acc1) | ||||||
|  |  | ||||||
|  |     def test_save_checks_that_clean_is_called(self): | ||||||
|  |         class CustomError(Exception): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class TestDocument(Document): | ||||||
|  |             def clean(self): | ||||||
|  |                 raise CustomError() | ||||||
|  |  | ||||||
|  |         with self.assertRaises(CustomError): | ||||||
|  |             TestDocument().save() | ||||||
|  |  | ||||||
|  |         TestDocument().save(clean=False) | ||||||
|  |  | ||||||
|  |     def test_save_signal_pre_save_post_validation_makes_change_to_doc(self): | ||||||
|  |         class BlogPost(Document): | ||||||
|  |             content = StringField() | ||||||
|  |  | ||||||
|  |             @classmethod | ||||||
|  |             def pre_save_post_validation(cls, sender, document, **kwargs): | ||||||
|  |                 document.content = 'checked' | ||||||
|  |  | ||||||
|  |         signals.pre_save_post_validation.connect(BlogPost.pre_save_post_validation, sender=BlogPost) | ||||||
|  |  | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |         post = BlogPost(content='unchecked').save() | ||||||
|  |         self.assertEqual(post.content, 'checked') | ||||||
|  |         # Make sure pre_save_post_validation changes makes it to the db | ||||||
|  |         raw_doc = get_as_pymongo(post) | ||||||
|  |         self.assertEqual( | ||||||
|  |             raw_doc, | ||||||
|  |             { | ||||||
|  |                 'content': 'checked', | ||||||
|  |                 '_id': post.id | ||||||
|  |             }) | ||||||
|  |  | ||||||
|  |         # Important to disconnect as it could cause some assertions in test_signals | ||||||
|  |         # to fail (due to the garbage collection timing of this signal) | ||||||
|  |         signals.pre_save_post_validation.disconnect(BlogPost.pre_save_post_validation) | ||||||
|  |  | ||||||
|     def test_document_clean(self): |     def test_document_clean(self): | ||||||
|         class TestDocument(Document): |         class TestDocument(Document): | ||||||
|             status = StringField() |             status = StringField() | ||||||
|             pub_date = DateTimeField() |             cleaned = BooleanField(default=False) | ||||||
|  |  | ||||||
|             def clean(self): |             def clean(self): | ||||||
|                 if self.status == 'draft' and self.pub_date is not None: |                 self.cleaned = True | ||||||
|                     msg = 'Draft entries may not have a publication date.' |  | ||||||
|                     raise ValidationError(msg) |  | ||||||
|                 # Set the pub_date for published items if not set. |  | ||||||
|                 if self.status == 'published' and self.pub_date is None: |  | ||||||
|                     self.pub_date = datetime.now() |  | ||||||
|  |  | ||||||
|         TestDocument.drop_collection() |         TestDocument.drop_collection() | ||||||
|  |  | ||||||
|         t = TestDocument(status="draft", pub_date=datetime.now()) |         t = TestDocument(status="draft") | ||||||
|  |  | ||||||
|         try: |  | ||||||
|             t.save() |  | ||||||
|         except ValidationError as e: |  | ||||||
|             expect_msg = "Draft entries may not have a publication date." |  | ||||||
|             self.assertTrue(expect_msg in e.message) |  | ||||||
|             self.assertEqual(e.to_dict(), {'__all__': expect_msg}) |  | ||||||
|  |  | ||||||
|  |         # Ensure clean=False prevent call to clean | ||||||
|         t = TestDocument(status="published") |         t = TestDocument(status="published") | ||||||
|         t.save(clean=False) |         t.save(clean=False) | ||||||
|  |         self.assertEqual(t.status, "published") | ||||||
|         self.assertEqual(t.pub_date, None) |         self.assertEqual(t.cleaned, False) | ||||||
|  |  | ||||||
|         t = TestDocument(status="published") |         t = TestDocument(status="published") | ||||||
|  |         self.assertEqual(t.cleaned, False) | ||||||
|         t.save(clean=True) |         t.save(clean=True) | ||||||
|  |         self.assertEqual(t.status, "published") | ||||||
|         self.assertEqual(type(t.pub_date), datetime) |         self.assertEqual(t.cleaned, True) | ||||||
|  |         raw_doc = get_as_pymongo(t) | ||||||
|  |         # Make sure clean changes makes it to the db | ||||||
|  |         self.assertEqual( | ||||||
|  |             raw_doc, | ||||||
|  |             { | ||||||
|  |                 'status': 'published', | ||||||
|  |                 'cleaned': True, | ||||||
|  |                 '_id': t.id | ||||||
|  |             }) | ||||||
|  |  | ||||||
|     def test_document_embedded_clean(self): |     def test_document_embedded_clean(self): | ||||||
|         class TestEmbeddedDocument(EmbeddedDocument): |         class TestEmbeddedDocument(EmbeddedDocument): | ||||||
| @@ -773,12 +840,13 @@ class InstanceTest(unittest.TestCase): | |||||||
|         TestDocument.drop_collection() |         TestDocument.drop_collection() | ||||||
|  |  | ||||||
|         t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25, z=15)) |         t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25, z=15)) | ||||||
|         try: |  | ||||||
|  |         with self.assertRaises(ValidationError) as cm: | ||||||
|             t.save() |             t.save() | ||||||
|         except ValidationError as e: |  | ||||||
|             expect_msg = "Value of z != x + y" |         expected_msg = "Value of z != x + y" | ||||||
|             self.assertTrue(expect_msg in e.message) |         self.assertIn(expected_msg, cm.exception.message) | ||||||
|             self.assertEqual(e.to_dict(), {'doc': {'__all__': expect_msg}}) |         self.assertEqual(cm.exception.to_dict(), {'doc': {'__all__': expected_msg}}) | ||||||
|  |  | ||||||
|         t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save() |         t = TestDocument(doc=TestEmbeddedDocument(x=10, y=25)).save() | ||||||
|         self.assertEqual(t.doc.z, 35) |         self.assertEqual(t.doc.z, 35) | ||||||
| @@ -810,7 +878,8 @@ class InstanceTest(unittest.TestCase): | |||||||
|         doc2 = self.Person(name="jim", age=20).save() |         doc2 = self.Person(name="jim", age=20).save() | ||||||
|         docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())] |         docs = [dict(doc1.to_mongo()), dict(doc2.to_mongo())] | ||||||
|  |  | ||||||
|         assert not doc1.modify({'name': doc2.name}, set__age=100) |         n_modified = doc1.modify({'name': doc2.name}, set__age=100) | ||||||
|  |         self.assertEqual(n_modified, 0) | ||||||
|  |  | ||||||
|         self.assertDbEqual(docs) |         self.assertDbEqual(docs) | ||||||
|  |  | ||||||
| @@ -819,7 +888,8 @@ class InstanceTest(unittest.TestCase): | |||||||
|         doc2 = self.Person(id=ObjectId(), name="jim", age=20) |         doc2 = self.Person(id=ObjectId(), name="jim", age=20) | ||||||
|         docs = [dict(doc1.to_mongo())] |         docs = [dict(doc1.to_mongo())] | ||||||
|  |  | ||||||
|         assert not doc2.modify({'name': doc2.name}, set__age=100) |         n_modified = doc2.modify({'name': doc2.name}, set__age=100) | ||||||
|  |         self.assertEqual(n_modified, 0) | ||||||
|  |  | ||||||
|         self.assertDbEqual(docs) |         self.assertDbEqual(docs) | ||||||
|  |  | ||||||
| @@ -835,23 +905,29 @@ class InstanceTest(unittest.TestCase): | |||||||
|         doc.job.name = "Google" |         doc.job.name = "Google" | ||||||
|         doc.job.years = 3 |         doc.job.years = 3 | ||||||
|  |  | ||||||
|         assert doc.modify( |         n_modified = doc.modify( | ||||||
|             set__age=21, set__job__name="MongoDB", unset__job__years=True) |             set__age=21, set__job__name="MongoDB", unset__job__years=True) | ||||||
|  |         self.assertEqual(n_modified, 1) | ||||||
|         doc_copy.age = 21 |         doc_copy.age = 21 | ||||||
|         doc_copy.job.name = "MongoDB" |         doc_copy.job.name = "MongoDB" | ||||||
|         del doc_copy.job.years |         del doc_copy.job.years | ||||||
|  |  | ||||||
|         assert doc.to_json() == doc_copy.to_json() |         self.assertEqual(doc.to_json(), doc_copy.to_json()) | ||||||
|         assert doc._get_changed_fields() == [] |         self.assertEqual(doc._get_changed_fields(), []) | ||||||
|  |  | ||||||
|         self.assertDbEqual([dict(other_doc.to_mongo()), dict(doc.to_mongo())]) |         self.assertDbEqual([dict(other_doc.to_mongo()), dict(doc.to_mongo())]) | ||||||
|  |  | ||||||
|     @needs_mongodb_v26 |  | ||||||
|     def test_modify_with_positional_push(self): |     def test_modify_with_positional_push(self): | ||||||
|  |         class Content(EmbeddedDocument): | ||||||
|  |             keywords = ListField(StringField()) | ||||||
|  |  | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
|             tags = ListField(StringField()) |             tags = ListField(StringField()) | ||||||
|  |             content = EmbeddedDocumentField(Content) | ||||||
|  |  | ||||||
|  |         post = BlogPost.objects.create( | ||||||
|  |             tags=['python'], content=Content(keywords=['ipsum'])) | ||||||
|  |  | ||||||
|         post = BlogPost.objects.create(tags=['python']) |  | ||||||
|         self.assertEqual(post.tags, ['python']) |         self.assertEqual(post.tags, ['python']) | ||||||
|         post.modify(push__tags__0=['code', 'mongo']) |         post.modify(push__tags__0=['code', 'mongo']) | ||||||
|         self.assertEqual(post.tags, ['code', 'mongo', 'python']) |         self.assertEqual(post.tags, ['code', 'mongo', 'python']) | ||||||
| @@ -862,6 +938,16 @@ class InstanceTest(unittest.TestCase): | |||||||
|             ['code', 'mongo', 'python'] |             ['code', 'mongo', 'python'] | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |         self.assertEqual(post.content.keywords, ['ipsum']) | ||||||
|  |         post.modify(push__content__keywords__0=['lorem']) | ||||||
|  |         self.assertEqual(post.content.keywords, ['lorem', 'ipsum']) | ||||||
|  |  | ||||||
|  |         # Assert same order of the list items is maintained in the db | ||||||
|  |         self.assertEqual( | ||||||
|  |             BlogPost._get_collection().find_one({'_id': post.pk})['content']['keywords'], | ||||||
|  |             ['lorem', 'ipsum'] | ||||||
|  |         ) | ||||||
|  |  | ||||||
|     def test_save(self): |     def test_save(self): | ||||||
|         """Ensure that a document may be saved in the database.""" |         """Ensure that a document may be saved in the database.""" | ||||||
|  |  | ||||||
| @@ -870,19 +956,39 @@ class InstanceTest(unittest.TestCase): | |||||||
|         person.save() |         person.save() | ||||||
|  |  | ||||||
|         # Ensure that the object is in the database |         # Ensure that the object is in the database | ||||||
|         collection = self.db[self.Person._get_collection_name()] |         raw_doc = get_as_pymongo(person) | ||||||
|         person_obj = collection.find_one({'name': 'Test User'}) |         self.assertEqual( | ||||||
|         self.assertEqual(person_obj['name'], 'Test User') |             raw_doc, | ||||||
|         self.assertEqual(person_obj['age'], 30) |             { | ||||||
|         self.assertEqual(person_obj['_id'], person.id) |                 '_cls': 'Person', | ||||||
|  |                 'name': 'Test User', | ||||||
|  |                 'age': 30, | ||||||
|  |                 '_id': person.id | ||||||
|  |             }) | ||||||
|  |  | ||||||
|         # Test skipping validation on save |     def test_save_skip_validation(self): | ||||||
|         class Recipient(Document): |         class Recipient(Document): | ||||||
|             email = EmailField(required=True) |             email = EmailField(required=True) | ||||||
|  |  | ||||||
|         recipient = Recipient(email='not-an-email') |         recipient = Recipient(email='not-an-email') | ||||||
|         self.assertRaises(ValidationError, recipient.save) |         with self.assertRaises(ValidationError): | ||||||
|  |             recipient.save() | ||||||
|  |  | ||||||
|         recipient.save(validate=False) |         recipient.save(validate=False) | ||||||
|  |         raw_doc = get_as_pymongo(recipient) | ||||||
|  |         self.assertEqual( | ||||||
|  |             raw_doc, | ||||||
|  |             { | ||||||
|  |                 'email': 'not-an-email', | ||||||
|  |                 '_id': recipient.id | ||||||
|  |             }) | ||||||
|  |  | ||||||
|  |     def test_save_with_bad_id(self): | ||||||
|  |         class Clown(Document): | ||||||
|  |             id = IntField(primary_key=True) | ||||||
|  |  | ||||||
|  |         with self.assertRaises(ValidationError): | ||||||
|  |             Clown(id="not_an_int").save() | ||||||
|  |  | ||||||
|     def test_save_to_a_value_that_equates_to_false(self): |     def test_save_to_a_value_that_equates_to_false(self): | ||||||
|         class Thing(EmbeddedDocument): |         class Thing(EmbeddedDocument): | ||||||
| @@ -1146,6 +1252,50 @@ class InstanceTest(unittest.TestCase): | |||||||
|         self.assertTrue(w1.toggle) |         self.assertTrue(w1.toggle) | ||||||
|         self.assertEqual(w1.count, 3) |         self.assertEqual(w1.count, 3) | ||||||
|  |  | ||||||
|  |     def test_save_update_selectively(self): | ||||||
|  |         class WildBoy(Document): | ||||||
|  |             age = IntField() | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         WildBoy.drop_collection() | ||||||
|  |  | ||||||
|  |         WildBoy(age=12, name='John').save() | ||||||
|  |  | ||||||
|  |         boy1 = WildBoy.objects().first() | ||||||
|  |         boy2 = WildBoy.objects().first() | ||||||
|  |  | ||||||
|  |         boy1.age = 99 | ||||||
|  |         boy1.save() | ||||||
|  |         boy2.name = 'Bob' | ||||||
|  |         boy2.save() | ||||||
|  |  | ||||||
|  |         fresh_boy = WildBoy.objects().first() | ||||||
|  |         self.assertEqual(fresh_boy.age, 99) | ||||||
|  |         self.assertEqual(fresh_boy.name, 'Bob') | ||||||
|  |  | ||||||
|  |     def test_save_update_selectively_with_custom_pk(self): | ||||||
|  |         # Prevents regression of #2082 | ||||||
|  |         class WildBoy(Document): | ||||||
|  |             pk_id = StringField(primary_key=True) | ||||||
|  |             age = IntField() | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         WildBoy.drop_collection() | ||||||
|  |  | ||||||
|  |         WildBoy(pk_id='A', age=12, name='John').save() | ||||||
|  |  | ||||||
|  |         boy1 = WildBoy.objects().first() | ||||||
|  |         boy2 = WildBoy.objects().first() | ||||||
|  |  | ||||||
|  |         boy1.age = 99 | ||||||
|  |         boy1.save() | ||||||
|  |         boy2.name = 'Bob' | ||||||
|  |         boy2.save() | ||||||
|  |  | ||||||
|  |         fresh_boy = WildBoy.objects().first() | ||||||
|  |         self.assertEqual(fresh_boy.age, 99) | ||||||
|  |         self.assertEqual(fresh_boy.name, 'Bob') | ||||||
|  |  | ||||||
|     def test_update(self): |     def test_update(self): | ||||||
|         """Ensure that an existing document is updated instead of be |         """Ensure that an existing document is updated instead of be | ||||||
|         overwritten. |         overwritten. | ||||||
| @@ -1428,6 +1578,62 @@ class InstanceTest(unittest.TestCase): | |||||||
|         self.assertEqual(person.age, 21) |         self.assertEqual(person.age, 21) | ||||||
|         self.assertEqual(person.active, False) |         self.assertEqual(person.active, False) | ||||||
|  |  | ||||||
|  |     def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_embedded_doc(self): | ||||||
|  |         # Refers to Issue #1685 | ||||||
|  |         class EmbeddedChildModel(EmbeddedDocument): | ||||||
|  |             id = DictField(primary_key=True) | ||||||
|  |  | ||||||
|  |         class ParentModel(Document): | ||||||
|  |             child = EmbeddedDocumentField( | ||||||
|  |                 EmbeddedChildModel) | ||||||
|  |  | ||||||
|  |         emb = EmbeddedChildModel(id={'1': [1]}) | ||||||
|  |         changed_fields = ParentModel(child=emb)._get_changed_fields() | ||||||
|  |         self.assertEqual(changed_fields, []) | ||||||
|  |  | ||||||
|  |     def test__get_changed_fields_same_ids_reference_field_does_not_enters_infinite_loop_different_doc(self): | ||||||
|  |         # Refers to Issue #1685 | ||||||
|  |         class User(Document): | ||||||
|  |             id = IntField(primary_key=True) | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Message(Document): | ||||||
|  |             id = IntField(primary_key=True) | ||||||
|  |             author = ReferenceField(User) | ||||||
|  |  | ||||||
|  |         Message.drop_collection() | ||||||
|  |  | ||||||
|  |         # All objects share the same id, but each in a different collection | ||||||
|  |         user = User(id=1, name='user-name').save() | ||||||
|  |         message = Message(id=1, author=user).save() | ||||||
|  |  | ||||||
|  |         message.author.name = 'tutu' | ||||||
|  |         self.assertEqual(message._get_changed_fields(), []) | ||||||
|  |         self.assertEqual(user._get_changed_fields(), ['name']) | ||||||
|  |  | ||||||
|  |     def test__get_changed_fields_same_ids_embedded(self): | ||||||
|  |         # Refers to Issue #1768 | ||||||
|  |         class User(EmbeddedDocument): | ||||||
|  |             id = IntField() | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Message(Document): | ||||||
|  |             id = IntField(primary_key=True) | ||||||
|  |             author = EmbeddedDocumentField(User) | ||||||
|  |  | ||||||
|  |         Message.drop_collection() | ||||||
|  |  | ||||||
|  |         # All objects share the same id, but each in a different collection | ||||||
|  |         user = User(id=1, name='user-name')  # .save() | ||||||
|  |         message = Message(id=1, author=user).save() | ||||||
|  |  | ||||||
|  |         message.author.name = 'tutu' | ||||||
|  |         self.assertEqual(message._get_changed_fields(), ['author.name']) | ||||||
|  |         message.save() | ||||||
|  |  | ||||||
|  |         message_fetched = Message.objects.with_id(message.id) | ||||||
|  |         self.assertEqual(message_fetched.author.name, 'tutu') | ||||||
|  |  | ||||||
|     def test_query_count_when_saving(self): |     def test_query_count_when_saving(self): | ||||||
|         """Ensure references don't cause extra fetches when saving""" |         """Ensure references don't cause extra fetches when saving""" | ||||||
|         class Organization(Document): |         class Organization(Document): | ||||||
| @@ -1461,9 +1667,9 @@ class InstanceTest(unittest.TestCase): | |||||||
|         user = User.objects.first() |         user = User.objects.first() | ||||||
|         # Even if stored as ObjectId's internally mongoengine uses DBRefs |         # Even if stored as ObjectId's internally mongoengine uses DBRefs | ||||||
|         # As ObjectId's aren't automatically derefenced |         # As ObjectId's aren't automatically derefenced | ||||||
|         self.assertTrue(isinstance(user._data['orgs'][0], DBRef)) |         self.assertIsInstance(user._data['orgs'][0], DBRef) | ||||||
|         self.assertTrue(isinstance(user.orgs[0], Organization)) |         self.assertIsInstance(user.orgs[0], Organization) | ||||||
|         self.assertTrue(isinstance(user._data['orgs'][0], Organization)) |         self.assertIsInstance(user._data['orgs'][0], Organization) | ||||||
|  |  | ||||||
|         # Changing a value |         # Changing a value | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -1843,9 +2049,8 @@ class InstanceTest(unittest.TestCase): | |||||||
|         post_obj = BlogPost.objects.first() |         post_obj = BlogPost.objects.first() | ||||||
|  |  | ||||||
|         # Test laziness |         # Test laziness | ||||||
|         self.assertTrue(isinstance(post_obj._data['author'], |         self.assertIsInstance(post_obj._data['author'], bson.DBRef) | ||||||
|                                    bson.DBRef)) |         self.assertIsInstance(post_obj.author, self.Person) | ||||||
|         self.assertTrue(isinstance(post_obj.author, self.Person)) |  | ||||||
|         self.assertEqual(post_obj.author.name, 'Test User') |         self.assertEqual(post_obj.author.name, 'Test User') | ||||||
|  |  | ||||||
|         # Ensure that the dereferenced object may be changed and saved |         # Ensure that the dereferenced object may be changed and saved | ||||||
| @@ -1937,7 +2142,6 @@ class InstanceTest(unittest.TestCase): | |||||||
|         child_record.delete() |         child_record.delete() | ||||||
|         self.assertEqual(Record.objects(name='parent').get().children, []) |         self.assertEqual(Record.objects(name='parent').get().children, []) | ||||||
|  |  | ||||||
|  |  | ||||||
|     def test_reverse_delete_rule_with_custom_id_field(self): |     def test_reverse_delete_rule_with_custom_id_field(self): | ||||||
|         """Ensure that a referenced document with custom primary key |         """Ensure that a referenced document with custom primary key | ||||||
|         is also deleted upon deletion. |         is also deleted upon deletion. | ||||||
| @@ -2251,12 +2455,12 @@ class InstanceTest(unittest.TestCase): | |||||||
|         # Make sure docs are properly identified in a list (__eq__ is used |         # Make sure docs are properly identified in a list (__eq__ is used | ||||||
|         # for the comparison). |         # for the comparison). | ||||||
|         all_user_list = list(User.objects.all()) |         all_user_list = list(User.objects.all()) | ||||||
|         self.assertTrue(u1 in all_user_list) |         self.assertIn(u1, all_user_list) | ||||||
|         self.assertTrue(u2 in all_user_list) |         self.assertIn(u2, all_user_list) | ||||||
|         self.assertTrue(u3 in all_user_list) |         self.assertIn(u3, all_user_list) | ||||||
|         self.assertTrue(u4 not in all_user_list)  # New object |         self.assertNotIn(u4, all_user_list)  # New object | ||||||
|         self.assertTrue(b1 not in all_user_list)  # Other object |         self.assertNotIn(b1, all_user_list)  # Other object | ||||||
|         self.assertTrue(b2 not in all_user_list)  # Other object |         self.assertNotIn(b2, all_user_list)  # Other object | ||||||
|  |  | ||||||
|         # Make sure docs can be used as keys in a dict (__hash__ is used |         # Make sure docs can be used as keys in a dict (__hash__ is used | ||||||
|         # for hashing the docs). |         # for hashing the docs). | ||||||
| @@ -2274,10 +2478,10 @@ class InstanceTest(unittest.TestCase): | |||||||
|         # Make sure docs are properly identified in a set (__hash__ is used |         # Make sure docs are properly identified in a set (__hash__ is used | ||||||
|         # for hashing the docs). |         # for hashing the docs). | ||||||
|         all_user_set = set(User.objects.all()) |         all_user_set = set(User.objects.all()) | ||||||
|         self.assertTrue(u1 in all_user_set) |         self.assertIn(u1, all_user_set) | ||||||
|         self.assertTrue(u4 not in all_user_set) |         self.assertNotIn(u4, all_user_set) | ||||||
|         self.assertTrue(b1 not in all_user_list) |         self.assertNotIn(b1, all_user_list) | ||||||
|         self.assertTrue(b2 not in all_user_list) |         self.assertNotIn(b2, all_user_list) | ||||||
|  |  | ||||||
|         # Make sure duplicate docs aren't accepted in the set |         # Make sure duplicate docs aren't accepted in the set | ||||||
|         self.assertEqual(len(all_user_set), 3) |         self.assertEqual(len(all_user_set), 3) | ||||||
| @@ -2694,7 +2898,7 @@ class InstanceTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         User.drop_collection() |         User.drop_collection() | ||||||
|  |  | ||||||
|         User._get_collection().save({ |         User._get_collection().insert_one({ | ||||||
|             'name': 'John', |             'name': 'John', | ||||||
|             'foo': 'Bar', |             'foo': 'Bar', | ||||||
|             'data': [1, 2, 3] |             'data': [1, 2, 3] | ||||||
| @@ -2710,7 +2914,7 @@ class InstanceTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         User.drop_collection() |         User.drop_collection() | ||||||
|  |  | ||||||
|         User._get_collection().save({ |         User._get_collection().insert_one({ | ||||||
|             'name': 'John', |             'name': 'John', | ||||||
|             'foo': 'Bar', |             'foo': 'Bar', | ||||||
|             'data': [1, 2, 3] |             'data': [1, 2, 3] | ||||||
| @@ -2733,7 +2937,7 @@ class InstanceTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         User.drop_collection() |         User.drop_collection() | ||||||
|  |  | ||||||
|         User._get_collection().save({ |         User._get_collection().insert_one({ | ||||||
|             'name': 'John', |             'name': 'John', | ||||||
|             'thing': { |             'thing': { | ||||||
|                 'name': 'My thing', |                 'name': 'My thing', | ||||||
| @@ -2756,7 +2960,7 @@ class InstanceTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         User.drop_collection() |         User.drop_collection() | ||||||
|  |  | ||||||
|         User._get_collection().save({ |         User._get_collection().insert_one({ | ||||||
|             'name': 'John', |             'name': 'John', | ||||||
|             'thing': { |             'thing': { | ||||||
|                 'name': 'My thing', |                 'name': 'My thing', | ||||||
| @@ -2779,7 +2983,7 @@ class InstanceTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         User.drop_collection() |         User.drop_collection() | ||||||
|  |  | ||||||
|         User._get_collection().save({ |         User._get_collection().insert_one({ | ||||||
|             'name': 'John', |             'name': 'John', | ||||||
|             'thing': { |             'thing': { | ||||||
|                 'name': 'My thing', |                 'name': 'My thing', | ||||||
| @@ -2978,7 +3182,7 @@ class InstanceTest(unittest.TestCase): | |||||||
|         Person(name="Harry Potter").save() |         Person(name="Harry Potter").save() | ||||||
|  |  | ||||||
|         person = Person.objects.first() |         person = Person.objects.first() | ||||||
|         self.assertTrue('id' in person._data.keys()) |         self.assertIn('id', person._data.keys()) | ||||||
|         self.assertEqual(person._data.get('id'), person.id) |         self.assertEqual(person._data.get('id'), person.id) | ||||||
|  |  | ||||||
|     def test_complex_nesting_document_and_embedded_document(self): |     def test_complex_nesting_document_and_embedded_document(self): | ||||||
| @@ -2996,7 +3200,7 @@ class InstanceTest(unittest.TestCase): | |||||||
|  |  | ||||||
|             def expand(self): |             def expand(self): | ||||||
|                 self.flattened_parameter = {} |                 self.flattened_parameter = {} | ||||||
|                 for parameter_name, parameter in self.parameters.iteritems(): |                 for parameter_name, parameter in iteritems(self.parameters): | ||||||
|                     parameter.expand() |                     parameter.expand() | ||||||
|  |  | ||||||
|         class NodesSystem(Document): |         class NodesSystem(Document): | ||||||
| @@ -3004,7 +3208,7 @@ class InstanceTest(unittest.TestCase): | |||||||
|             nodes = MapField(ReferenceField(Node, dbref=False)) |             nodes = MapField(ReferenceField(Node, dbref=False)) | ||||||
|  |  | ||||||
|             def save(self, *args, **kwargs): |             def save(self, *args, **kwargs): | ||||||
|                 for node_name, node in self.nodes.iteritems(): |                 for node_name, node in iteritems(self.nodes): | ||||||
|                     node.expand() |                     node.expand() | ||||||
|                     node.save(*args, **kwargs) |                     node.save(*args, **kwargs) | ||||||
|                 super(NodesSystem, self).save(*args, **kwargs) |                 super(NodesSystem, self).save(*args, **kwargs) | ||||||
| @@ -3070,36 +3274,36 @@ class InstanceTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         dbref2 = f._data['test2'] |         dbref2 = f._data['test2'] | ||||||
|         obj2 = f.test2 |         obj2 = f.test2 | ||||||
|         self.assertTrue(isinstance(dbref2, DBRef)) |         self.assertIsInstance(dbref2, DBRef) | ||||||
|         self.assertTrue(isinstance(obj2, Test2)) |         self.assertIsInstance(obj2, Test2) | ||||||
|         self.assertTrue(obj2.id == dbref2.id) |         self.assertEqual(obj2.id, dbref2.id) | ||||||
|         self.assertTrue(obj2 == dbref2) |         self.assertEqual(obj2, dbref2) | ||||||
|         self.assertTrue(dbref2 == obj2) |         self.assertEqual(dbref2, obj2) | ||||||
|  |  | ||||||
|         dbref3 = f._data['test3'] |         dbref3 = f._data['test3'] | ||||||
|         obj3 = f.test3 |         obj3 = f.test3 | ||||||
|         self.assertTrue(isinstance(dbref3, DBRef)) |         self.assertIsInstance(dbref3, DBRef) | ||||||
|         self.assertTrue(isinstance(obj3, Test3)) |         self.assertIsInstance(obj3, Test3) | ||||||
|         self.assertTrue(obj3.id == dbref3.id) |         self.assertEqual(obj3.id, dbref3.id) | ||||||
|         self.assertTrue(obj3 == dbref3) |         self.assertEqual(obj3, dbref3) | ||||||
|         self.assertTrue(dbref3 == obj3) |         self.assertEqual(dbref3, obj3) | ||||||
|  |  | ||||||
|         self.assertTrue(obj2.id == obj3.id) |         self.assertEqual(obj2.id, obj3.id) | ||||||
|         self.assertTrue(dbref2.id == dbref3.id) |         self.assertEqual(dbref2.id, dbref3.id) | ||||||
|         self.assertFalse(dbref2 == dbref3) |         self.assertNotEqual(dbref2, dbref3) | ||||||
|         self.assertFalse(dbref3 == dbref2) |         self.assertNotEqual(dbref3, dbref2) | ||||||
|         self.assertTrue(dbref2 != dbref3) |         self.assertNotEqual(dbref2, dbref3) | ||||||
|         self.assertTrue(dbref3 != dbref2) |         self.assertNotEqual(dbref3, dbref2) | ||||||
|  |  | ||||||
|         self.assertFalse(obj2 == dbref3) |         self.assertNotEqual(obj2, dbref3) | ||||||
|         self.assertFalse(dbref3 == obj2) |         self.assertNotEqual(dbref3, obj2) | ||||||
|         self.assertTrue(obj2 != dbref3) |         self.assertNotEqual(obj2, dbref3) | ||||||
|         self.assertTrue(dbref3 != obj2) |         self.assertNotEqual(dbref3, obj2) | ||||||
|  |  | ||||||
|         self.assertFalse(obj3 == dbref2) |         self.assertNotEqual(obj3, dbref2) | ||||||
|         self.assertFalse(dbref2 == obj3) |         self.assertNotEqual(dbref2, obj3) | ||||||
|         self.assertTrue(obj3 != dbref2) |         self.assertNotEqual(obj3, dbref2) | ||||||
|         self.assertTrue(dbref2 != obj3) |         self.assertNotEqual(dbref2, obj3) | ||||||
|  |  | ||||||
|     def test_default_values(self): |     def test_default_values(self): | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
| @@ -3114,7 +3318,7 @@ class InstanceTest(unittest.TestCase): | |||||||
|         p2.name = 'alon2' |         p2.name = 'alon2' | ||||||
|         p2.save() |         p2.save() | ||||||
|         p3 = Person.objects().only('created_on')[0] |         p3 = Person.objects().only('created_on')[0] | ||||||
|         self.assertEquals(orig_created_on, p3.created_on) |         self.assertEqual(orig_created_on, p3.created_on) | ||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             created_on = DateTimeField(default=lambda: datetime.utcnow()) |             created_on = DateTimeField(default=lambda: datetime.utcnow()) | ||||||
| @@ -3123,31 +3327,88 @@ class InstanceTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         p4 = Person.objects()[0] |         p4 = Person.objects()[0] | ||||||
|         p4.save() |         p4.save() | ||||||
|         self.assertEquals(p4.height, 189) |         self.assertEqual(p4.height, 189) | ||||||
|  |  | ||||||
|         # However the default will not be fixed in DB |         # However the default will not be fixed in DB | ||||||
|         self.assertEquals(Person.objects(height=189).count(), 0) |         self.assertEqual(Person.objects(height=189).count(), 0) | ||||||
|  |  | ||||||
|         # alter DB for the new default |         # alter DB for the new default | ||||||
|         coll = Person._get_collection() |         coll = Person._get_collection() | ||||||
|         for person in Person.objects.as_pymongo(): |         for person in Person.objects.as_pymongo(): | ||||||
|             if 'height' not in person: |             if 'height' not in person: | ||||||
|                 person['height'] = 189 |                 coll.update_one({'_id': person['_id']}, {'$set': {'height': 189}}) | ||||||
|                 coll.save(person) |  | ||||||
|  |  | ||||||
|         self.assertEquals(Person.objects(height=189).count(), 1) |         self.assertEqual(Person.objects(height=189).count(), 1) | ||||||
|  |  | ||||||
|     def test_from_son(self): |     def test_from_son(self): | ||||||
|         # 771 |         # 771 | ||||||
|         class MyPerson(self.Person): |         class MyPerson(self.Person): | ||||||
|             meta = dict(shard_key=["id"]) |             meta = dict(shard_key=["id"]) | ||||||
|         p = MyPerson.from_json('{"name": "name", "age": 27}', created=True) |         p = MyPerson.from_json('{"name": "name", "age": 27}', created=True) | ||||||
|         self.assertEquals(p.id, None) |         self.assertEqual(p.id, None) | ||||||
|         p.id = "12345"  # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here |         p.id = "12345"  # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here | ||||||
|         p = MyPerson._from_son({"name": "name", "age": 27}, created=True) |         p = MyPerson._from_son({"name": "name", "age": 27}, created=True) | ||||||
|         self.assertEquals(p.id, None) |         self.assertEqual(p.id, None) | ||||||
|         p.id = "12345"  # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here |         p.id = "12345"  # in case it is not working: "OperationError: Shard Keys are immutable..." will be raised here | ||||||
|  |  | ||||||
|  |     def test_from_son_created_False_without_id(self): | ||||||
|  |         class MyPerson(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         MyPerson.objects.delete() | ||||||
|  |  | ||||||
|  |         p = MyPerson.from_json('{"name": "a_fancy_name"}', created=False) | ||||||
|  |         self.assertFalse(p._created) | ||||||
|  |         self.assertIsNone(p.id) | ||||||
|  |         p.save() | ||||||
|  |         self.assertIsNotNone(p.id) | ||||||
|  |         saved_p = MyPerson.objects.get(id=p.id) | ||||||
|  |         self.assertEqual(saved_p.name, 'a_fancy_name') | ||||||
|  |  | ||||||
|  |     def test_from_son_created_False_with_id(self): | ||||||
|  |         # 1854 | ||||||
|  |         class MyPerson(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         MyPerson.objects.delete() | ||||||
|  |  | ||||||
|  |         p = MyPerson.from_json('{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=False) | ||||||
|  |         self.assertFalse(p._created) | ||||||
|  |         self.assertEqual(p._changed_fields, []) | ||||||
|  |         self.assertEqual(p.name, 'a_fancy_name') | ||||||
|  |         self.assertEqual(p.id, ObjectId('5b85a8b04ec5dc2da388296e')) | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         with self.assertRaises(DoesNotExist): | ||||||
|  |             # Since created=False and we gave an id in the json and _changed_fields is empty | ||||||
|  |             # mongoengine assumes that the document exits with that structure already | ||||||
|  |             # and calling .save() didn't save anything | ||||||
|  |             MyPerson.objects.get(id=p.id) | ||||||
|  |  | ||||||
|  |         self.assertFalse(p._created) | ||||||
|  |         p.name = 'a new fancy name' | ||||||
|  |         self.assertEqual(p._changed_fields, ['name']) | ||||||
|  |         p.save() | ||||||
|  |         saved_p = MyPerson.objects.get(id=p.id) | ||||||
|  |         self.assertEqual(saved_p.name, p.name) | ||||||
|  |  | ||||||
|  |     def test_from_son_created_True_with_an_id(self): | ||||||
|  |         class MyPerson(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         MyPerson.objects.delete() | ||||||
|  |  | ||||||
|  |         p = MyPerson.from_json('{"_id": "5b85a8b04ec5dc2da388296e", "name": "a_fancy_name"}', created=True) | ||||||
|  |         self.assertTrue(p._created) | ||||||
|  |         self.assertEqual(p._changed_fields, []) | ||||||
|  |         self.assertEqual(p.name, 'a_fancy_name') | ||||||
|  |         self.assertEqual(p.id, ObjectId('5b85a8b04ec5dc2da388296e')) | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         saved_p = MyPerson.objects.get(id=p.id) | ||||||
|  |         self.assertEqual(saved_p, p) | ||||||
|  |         self.assertEqual(p.name, 'a_fancy_name') | ||||||
|  |  | ||||||
|     def test_null_field(self): |     def test_null_field(self): | ||||||
|         # 734 |         # 734 | ||||||
|         class User(Document): |         class User(Document): | ||||||
| @@ -3165,7 +3426,7 @@ class InstanceTest(unittest.TestCase): | |||||||
|         u_from_db = User.objects.get(name='user') |         u_from_db = User.objects.get(name='user') | ||||||
|         u_from_db.height = None |         u_from_db.height = None | ||||||
|         u_from_db.save() |         u_from_db.save() | ||||||
|         self.assertEquals(u_from_db.height, None) |         self.assertEqual(u_from_db.height, None) | ||||||
|         # 864 |         # 864 | ||||||
|         self.assertEqual(u_from_db.str_fld, None) |         self.assertEqual(u_from_db.str_fld, None) | ||||||
|         self.assertEqual(u_from_db.int_fld, None) |         self.assertEqual(u_from_db.int_fld, None) | ||||||
| @@ -3179,7 +3440,7 @@ class InstanceTest(unittest.TestCase): | |||||||
|         u.save() |         u.save() | ||||||
|         User.objects(name='user').update_one(set__height=None, upsert=True) |         User.objects(name='user').update_one(set__height=None, upsert=True) | ||||||
|         u_from_db = User.objects.get(name='user') |         u_from_db = User.objects.get(name='user') | ||||||
|         self.assertEquals(u_from_db.height, None) |         self.assertEqual(u_from_db.height, None) | ||||||
|  |  | ||||||
|     def test_not_saved_eq(self): |     def test_not_saved_eq(self): | ||||||
|         """Ensure we can compare documents not saved. |         """Ensure we can compare documents not saved. | ||||||
| @@ -3221,7 +3482,6 @@ class InstanceTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         person.update(set__height=2.0) |         person.update(set__height=2.0) | ||||||
|  |  | ||||||
|     @needs_mongodb_v26 |  | ||||||
|     def test_push_with_position(self): |     def test_push_with_position(self): | ||||||
|         """Ensure that push with position works properly for an instance.""" |         """Ensure that push with position works properly for an instance.""" | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
| @@ -3248,6 +3508,22 @@ class InstanceTest(unittest.TestCase): | |||||||
|         blog.reload() |         blog.reload() | ||||||
|         self.assertEqual(blog.tags, [["value1", 123]]) |         self.assertEqual(blog.tags, [["value1", 123]]) | ||||||
|  |  | ||||||
|  |     def test_accessing_objects_with_indexes_error(self): | ||||||
|  |         insert_result = self.db.company.insert_many([{'name': 'Foo'}, | ||||||
|  |                                                      {'name': 'Foo'}])  # Force 2 doc with same name | ||||||
|  |         REF_OID = insert_result.inserted_ids[0] | ||||||
|  |         self.db.user.insert_one({'company': REF_OID})   # Force 2 doc with same name | ||||||
|  |  | ||||||
|  |         class Company(Document): | ||||||
|  |             name = StringField(unique=True) | ||||||
|  |  | ||||||
|  |         class User(Document): | ||||||
|  |             company = ReferenceField(Company) | ||||||
|  |  | ||||||
|  |         # Ensure index creation exception aren't swallowed (#1688) | ||||||
|  |         with self.assertRaises(DuplicateKeyError): | ||||||
|  |             User.objects().select_related() | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -32,12 +32,12 @@ class TestJson(unittest.TestCase): | |||||||
|             string = StringField(db_field='s') |             string = StringField(db_field='s') | ||||||
|             embedded = EmbeddedDocumentField(Embedded, db_field='e') |             embedded = EmbeddedDocumentField(Embedded, db_field='e') | ||||||
|  |  | ||||||
|         doc = Doc( string="Hello", embedded=Embedded(string="Inner Hello")) |         doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello")) | ||||||
|         doc_json = doc.to_json(sort_keys=True, use_db_field=False,separators=(',', ':')) |         doc_json = doc.to_json(sort_keys=True, use_db_field=False, separators=(',', ':')) | ||||||
|  |  | ||||||
|         expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}""" |         expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}""" | ||||||
|  |  | ||||||
|         self.assertEqual( doc_json, expected_json) |         self.assertEqual(doc_json, expected_json) | ||||||
|  |  | ||||||
|     def test_json_simple(self): |     def test_json_simple(self): | ||||||
|  |  | ||||||
| @@ -61,10 +61,6 @@ class TestJson(unittest.TestCase): | |||||||
|         self.assertEqual(doc, Doc.from_json(doc.to_json())) |         self.assertEqual(doc, Doc.from_json(doc.to_json())) | ||||||
|  |  | ||||||
|     def test_json_complex(self): |     def test_json_complex(self): | ||||||
|  |  | ||||||
|         if pymongo.version_tuple[0] <= 2 and pymongo.version_tuple[1] <= 3: |  | ||||||
|             raise SkipTest("Need pymongo 2.4 as has a fix for DBRefs") |  | ||||||
|  |  | ||||||
|         class EmbeddedDoc(EmbeddedDocument): |         class EmbeddedDoc(EmbeddedDocument): | ||||||
|             pass |             pass | ||||||
|  |  | ||||||
|   | |||||||
| @@ -20,16 +20,16 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         # 1st level error schema |         # 1st level error schema | ||||||
|         error.errors = {'1st': ValidationError('bad 1st'), } |         error.errors = {'1st': ValidationError('bad 1st'), } | ||||||
|         self.assertTrue('1st' in error.to_dict()) |         self.assertIn('1st', error.to_dict()) | ||||||
|         self.assertEqual(error.to_dict()['1st'], 'bad 1st') |         self.assertEqual(error.to_dict()['1st'], 'bad 1st') | ||||||
|  |  | ||||||
|         # 2nd level error schema |         # 2nd level error schema | ||||||
|         error.errors = {'1st': ValidationError('bad 1st', errors={ |         error.errors = {'1st': ValidationError('bad 1st', errors={ | ||||||
|             '2nd': ValidationError('bad 2nd'), |             '2nd': ValidationError('bad 2nd'), | ||||||
|         })} |         })} | ||||||
|         self.assertTrue('1st' in error.to_dict()) |         self.assertIn('1st', error.to_dict()) | ||||||
|         self.assertTrue(isinstance(error.to_dict()['1st'], dict)) |         self.assertIsInstance(error.to_dict()['1st'], dict) | ||||||
|         self.assertTrue('2nd' in error.to_dict()['1st']) |         self.assertIn('2nd', error.to_dict()['1st']) | ||||||
|         self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd') |         self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd') | ||||||
|  |  | ||||||
|         # moar levels |         # moar levels | ||||||
| @@ -40,10 +40,10 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|                 }), |                 }), | ||||||
|             }), |             }), | ||||||
|         })} |         })} | ||||||
|         self.assertTrue('1st' in error.to_dict()) |         self.assertIn('1st', error.to_dict()) | ||||||
|         self.assertTrue('2nd' in error.to_dict()['1st']) |         self.assertIn('2nd', error.to_dict()['1st']) | ||||||
|         self.assertTrue('3rd' in error.to_dict()['1st']['2nd']) |         self.assertIn('3rd', error.to_dict()['1st']['2nd']) | ||||||
|         self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd']) |         self.assertIn('4th', error.to_dict()['1st']['2nd']['3rd']) | ||||||
|         self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'], |         self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'], | ||||||
|                          'Inception') |                          'Inception') | ||||||
|  |  | ||||||
| @@ -58,7 +58,7 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|         try: |         try: | ||||||
|             User().validate() |             User().validate() | ||||||
|         except ValidationError as e: |         except ValidationError as e: | ||||||
|             self.assertTrue("User:None" in e.message) |             self.assertIn("User:None", e.message) | ||||||
|             self.assertEqual(e.to_dict(), { |             self.assertEqual(e.to_dict(), { | ||||||
|                 'username': 'Field is required', |                 'username': 'Field is required', | ||||||
|                 'name': 'Field is required'}) |                 'name': 'Field is required'}) | ||||||
| @@ -68,7 +68,7 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|         try: |         try: | ||||||
|             user.save() |             user.save() | ||||||
|         except ValidationError as e: |         except ValidationError as e: | ||||||
|             self.assertTrue("User:RossC0" in e.message) |             self.assertIn("User:RossC0", e.message) | ||||||
|             self.assertEqual(e.to_dict(), { |             self.assertEqual(e.to_dict(), { | ||||||
|                 'name': 'Field is required'}) |                 'name': 'Field is required'}) | ||||||
|  |  | ||||||
| @@ -116,7 +116,7 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|         try: |         try: | ||||||
|             Doc(id="bad").validate() |             Doc(id="bad").validate() | ||||||
|         except ValidationError as e: |         except ValidationError as e: | ||||||
|             self.assertTrue("SubDoc:None" in e.message) |             self.assertIn("SubDoc:None", e.message) | ||||||
|             self.assertEqual(e.to_dict(), { |             self.assertEqual(e.to_dict(), { | ||||||
|                 "e": {'val': 'OK could not be converted to int'}}) |                 "e": {'val': 'OK could not be converted to int'}}) | ||||||
|  |  | ||||||
| @@ -127,14 +127,14 @@ class ValidatorErrorTest(unittest.TestCase): | |||||||
|         doc = Doc.objects.first() |         doc = Doc.objects.first() | ||||||
|         keys = doc._data.keys() |         keys = doc._data.keys() | ||||||
|         self.assertEqual(2, len(keys)) |         self.assertEqual(2, len(keys)) | ||||||
|         self.assertTrue('e' in keys) |         self.assertIn('e', keys) | ||||||
|         self.assertTrue('id' in keys) |         self.assertIn('id', keys) | ||||||
|  |  | ||||||
|         doc.e.val = "OK" |         doc.e.val = "OK" | ||||||
|         try: |         try: | ||||||
|             doc.save() |             doc.save() | ||||||
|         except ValidationError as e: |         except ValidationError as e: | ||||||
|             self.assertTrue("Doc:test" in e.message) |             self.assertIn("Doc:test", e.message) | ||||||
|             self.assertEqual(e.to_dict(), { |             self.assertEqual(e.to_dict(), { | ||||||
|                 "e": {'val': 'OK could not be converted to int'}}) |                 "e": {'val': 'OK could not be converted to int'}}) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,3 +1,3 @@ | |||||||
| from fields import * | from .fields import * | ||||||
| from file_tests import * | from .file_tests import * | ||||||
| from geo import * | from .geo import * | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -24,6 +24,16 @@ TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') | |||||||
| TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') | TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def get_file(path): | ||||||
|  |     """Use a BytesIO instead of a file to allow | ||||||
|  |     to have a one-liner and avoid that the file remains opened""" | ||||||
|  |     bytes_io = StringIO() | ||||||
|  |     with open(path, 'rb') as f: | ||||||
|  |         bytes_io.write(f.read()) | ||||||
|  |     bytes_io.seek(0) | ||||||
|  |     return bytes_io | ||||||
|  |  | ||||||
|  |  | ||||||
| class FileTest(MongoDBTestCase): | class FileTest(MongoDBTestCase): | ||||||
|  |  | ||||||
|     def tearDown(self): |     def tearDown(self): | ||||||
| @@ -53,8 +63,8 @@ class FileTest(MongoDBTestCase): | |||||||
|         putfile.save() |         putfile.save() | ||||||
|  |  | ||||||
|         result = PutFile.objects.first() |         result = PutFile.objects.first() | ||||||
|         self.assertTrue(putfile == result) |         self.assertEqual(putfile, result) | ||||||
|         self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello>") |         self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello (%s)>" % result.the_file.grid_id) | ||||||
|         self.assertEqual(result.the_file.read(), text) |         self.assertEqual(result.the_file.read(), text) | ||||||
|         self.assertEqual(result.the_file.content_type, content_type) |         self.assertEqual(result.the_file.content_type, content_type) | ||||||
|         result.the_file.delete()  # Remove file from GridFS |         result.the_file.delete()  # Remove file from GridFS | ||||||
| @@ -71,7 +81,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         putfile.save() |         putfile.save() | ||||||
|  |  | ||||||
|         result = PutFile.objects.first() |         result = PutFile.objects.first() | ||||||
|         self.assertTrue(putfile == result) |         self.assertEqual(putfile, result) | ||||||
|         self.assertEqual(result.the_file.read(), text) |         self.assertEqual(result.the_file.read(), text) | ||||||
|         self.assertEqual(result.the_file.content_type, content_type) |         self.assertEqual(result.the_file.content_type, content_type) | ||||||
|         result.the_file.delete() |         result.the_file.delete() | ||||||
| @@ -96,7 +106,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         streamfile.save() |         streamfile.save() | ||||||
|  |  | ||||||
|         result = StreamFile.objects.first() |         result = StreamFile.objects.first() | ||||||
|         self.assertTrue(streamfile == result) |         self.assertEqual(streamfile, result) | ||||||
|         self.assertEqual(result.the_file.read(), text + more_text) |         self.assertEqual(result.the_file.read(), text + more_text) | ||||||
|         self.assertEqual(result.the_file.content_type, content_type) |         self.assertEqual(result.the_file.content_type, content_type) | ||||||
|         result.the_file.seek(0) |         result.the_file.seek(0) | ||||||
| @@ -132,7 +142,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         streamfile.save() |         streamfile.save() | ||||||
|  |  | ||||||
|         result = StreamFile.objects.first() |         result = StreamFile.objects.first() | ||||||
|         self.assertTrue(streamfile == result) |         self.assertEqual(streamfile, result) | ||||||
|         self.assertEqual(result.the_file.read(), text + more_text) |         self.assertEqual(result.the_file.read(), text + more_text) | ||||||
|         # self.assertEqual(result.the_file.content_type, content_type) |         # self.assertEqual(result.the_file.content_type, content_type) | ||||||
|         result.the_file.seek(0) |         result.the_file.seek(0) | ||||||
| @@ -161,7 +171,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         setfile.save() |         setfile.save() | ||||||
|  |  | ||||||
|         result = SetFile.objects.first() |         result = SetFile.objects.first() | ||||||
|         self.assertTrue(setfile == result) |         self.assertEqual(setfile, result) | ||||||
|         self.assertEqual(result.the_file.read(), text) |         self.assertEqual(result.the_file.read(), text) | ||||||
|  |  | ||||||
|         # Try replacing file with new one |         # Try replacing file with new one | ||||||
| @@ -169,7 +179,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         result.save() |         result.save() | ||||||
|  |  | ||||||
|         result = SetFile.objects.first() |         result = SetFile.objects.first() | ||||||
|         self.assertTrue(setfile == result) |         self.assertEqual(setfile, result) | ||||||
|         self.assertEqual(result.the_file.read(), more_text) |         self.assertEqual(result.the_file.read(), more_text) | ||||||
|         result.the_file.delete() |         result.the_file.delete() | ||||||
|  |  | ||||||
| @@ -231,8 +241,8 @@ class FileTest(MongoDBTestCase): | |||||||
|         test_file_dupe = TestFile() |         test_file_dupe = TestFile() | ||||||
|         data = test_file_dupe.the_file.read()  # Should be None |         data = test_file_dupe.the_file.read()  # Should be None | ||||||
|  |  | ||||||
|         self.assertTrue(test_file.name != test_file_dupe.name) |         self.assertNotEqual(test_file.name, test_file_dupe.name) | ||||||
|         self.assertTrue(test_file.the_file.read() != data) |         self.assertNotEqual(test_file.the_file.read(), data) | ||||||
|  |  | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
|  |  | ||||||
| @@ -247,8 +257,8 @@ class FileTest(MongoDBTestCase): | |||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         marmot = Animal(genus='Marmota', family='Sciuridae') |         marmot = Animal(genus='Marmota', family='Sciuridae') | ||||||
|  |  | ||||||
|         marmot_photo = open(TEST_IMAGE_PATH, 'rb')  # Retrieve a photo from disk |         marmot_photo_content = get_file(TEST_IMAGE_PATH)  # Retrieve a photo from disk | ||||||
|         marmot.photo.put(marmot_photo, content_type='image/jpeg', foo='bar') |         marmot.photo.put(marmot_photo_content, content_type='image/jpeg', foo='bar') | ||||||
|         marmot.photo.close() |         marmot.photo.close() | ||||||
|         marmot.save() |         marmot.save() | ||||||
|  |  | ||||||
| @@ -261,11 +271,11 @@ class FileTest(MongoDBTestCase): | |||||||
|             the_file = FileField() |             the_file = FileField() | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
|  |  | ||||||
|         test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save() |         test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() | ||||||
|         self.assertEqual(test_file.the_file.get().length, 8313) |         self.assertEqual(test_file.the_file.get().length, 8313) | ||||||
|  |  | ||||||
|         test_file = TestFile.objects.first() |         test_file = TestFile.objects.first() | ||||||
|         test_file.the_file = open(TEST_IMAGE2_PATH, 'rb') |         test_file.the_file = get_file(TEST_IMAGE2_PATH) | ||||||
|         test_file.save() |         test_file.save() | ||||||
|         self.assertEqual(test_file.the_file.get().length, 4971) |         self.assertEqual(test_file.the_file.get().length, 4971) | ||||||
|  |  | ||||||
| @@ -291,7 +301,7 @@ class FileTest(MongoDBTestCase): | |||||||
|             the_file = FileField() |             the_file = FileField() | ||||||
|  |  | ||||||
|         test_file = TestFile() |         test_file = TestFile() | ||||||
|         self.assertFalse(test_file.the_file in [{"test": 1}]) |         self.assertNotIn(test_file.the_file, [{"test": 1}]) | ||||||
|  |  | ||||||
|     def test_file_disk_space(self): |     def test_file_disk_space(self): | ||||||
|         """ Test disk space usage when we delete/replace a file """ |         """ Test disk space usage when we delete/replace a file """ | ||||||
| @@ -310,16 +320,16 @@ class FileTest(MongoDBTestCase): | |||||||
|  |  | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEquals(len(list(files)), 1) |         self.assertEqual(len(list(files)), 1) | ||||||
|         self.assertEquals(len(list(chunks)), 1) |         self.assertEqual(len(list(chunks)), 1) | ||||||
|  |  | ||||||
|         # Deleting the docoument should delete the files |         # Deleting the docoument should delete the files | ||||||
|         testfile.delete() |         testfile.delete() | ||||||
|  |  | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEquals(len(list(files)), 0) |         self.assertEqual(len(list(files)), 0) | ||||||
|         self.assertEquals(len(list(chunks)), 0) |         self.assertEqual(len(list(chunks)), 0) | ||||||
|  |  | ||||||
|         # Test case where we don't store a file in the first place |         # Test case where we don't store a file in the first place | ||||||
|         testfile = TestFile() |         testfile = TestFile() | ||||||
| @@ -327,15 +337,15 @@ class FileTest(MongoDBTestCase): | |||||||
|  |  | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEquals(len(list(files)), 0) |         self.assertEqual(len(list(files)), 0) | ||||||
|         self.assertEquals(len(list(chunks)), 0) |         self.assertEqual(len(list(chunks)), 0) | ||||||
|  |  | ||||||
|         testfile.delete() |         testfile.delete() | ||||||
|  |  | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEquals(len(list(files)), 0) |         self.assertEqual(len(list(files)), 0) | ||||||
|         self.assertEquals(len(list(chunks)), 0) |         self.assertEqual(len(list(chunks)), 0) | ||||||
|  |  | ||||||
|         # Test case where we overwrite the file |         # Test case where we overwrite the file | ||||||
|         testfile = TestFile() |         testfile = TestFile() | ||||||
| @@ -348,15 +358,15 @@ class FileTest(MongoDBTestCase): | |||||||
|  |  | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEquals(len(list(files)), 1) |         self.assertEqual(len(list(files)), 1) | ||||||
|         self.assertEquals(len(list(chunks)), 1) |         self.assertEqual(len(list(chunks)), 1) | ||||||
|  |  | ||||||
|         testfile.delete() |         testfile.delete() | ||||||
|  |  | ||||||
|         files = db.fs.files.find() |         files = db.fs.files.find() | ||||||
|         chunks = db.fs.chunks.find() |         chunks = db.fs.chunks.find() | ||||||
|         self.assertEquals(len(list(files)), 0) |         self.assertEqual(len(list(files)), 0) | ||||||
|         self.assertEquals(len(list(chunks)), 0) |         self.assertEqual(len(list(chunks)), 0) | ||||||
|  |  | ||||||
|     def test_image_field(self): |     def test_image_field(self): | ||||||
|         if not HAS_PIL: |         if not HAS_PIL: | ||||||
| @@ -379,7 +389,7 @@ class FileTest(MongoDBTestCase): | |||||||
|                 self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f) |                 self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f) | ||||||
|  |  | ||||||
|         t = TestImage() |         t = TestImage() | ||||||
|         t.image.put(open(TEST_IMAGE_PATH, 'rb')) |         t.image.put(get_file(TEST_IMAGE_PATH)) | ||||||
|         t.save() |         t.save() | ||||||
|  |  | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
| @@ -400,11 +410,11 @@ class FileTest(MongoDBTestCase): | |||||||
|             the_file = ImageField() |             the_file = ImageField() | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
|  |  | ||||||
|         test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save() |         test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() | ||||||
|         self.assertEqual(test_file.the_file.size, (371, 76)) |         self.assertEqual(test_file.the_file.size, (371, 76)) | ||||||
|  |  | ||||||
|         test_file = TestFile.objects.first() |         test_file = TestFile.objects.first() | ||||||
|         test_file.the_file = open(TEST_IMAGE2_PATH, 'rb') |         test_file.the_file = get_file(TEST_IMAGE2_PATH) | ||||||
|         test_file.save() |         test_file.save() | ||||||
|         self.assertEqual(test_file.the_file.size, (45, 101)) |         self.assertEqual(test_file.the_file.size, (45, 101)) | ||||||
|  |  | ||||||
| @@ -418,7 +428,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         TestImage.drop_collection() |         TestImage.drop_collection() | ||||||
|  |  | ||||||
|         t = TestImage() |         t = TestImage() | ||||||
|         t.image.put(open(TEST_IMAGE_PATH, 'rb')) |         t.image.put(get_file(TEST_IMAGE_PATH)) | ||||||
|         t.save() |         t.save() | ||||||
|  |  | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
| @@ -441,7 +451,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         TestImage.drop_collection() |         TestImage.drop_collection() | ||||||
|  |  | ||||||
|         t = TestImage() |         t = TestImage() | ||||||
|         t.image.put(open(TEST_IMAGE_PATH, 'rb')) |         t.image.put(get_file(TEST_IMAGE_PATH)) | ||||||
|         t.save() |         t.save() | ||||||
|  |  | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
| @@ -464,7 +474,7 @@ class FileTest(MongoDBTestCase): | |||||||
|         TestImage.drop_collection() |         TestImage.drop_collection() | ||||||
|  |  | ||||||
|         t = TestImage() |         t = TestImage() | ||||||
|         t.image.put(open(TEST_IMAGE_PATH, 'rb')) |         t.image.put(get_file(TEST_IMAGE_PATH)) | ||||||
|         t.save() |         t.save() | ||||||
|  |  | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
| @@ -542,8 +552,8 @@ class FileTest(MongoDBTestCase): | |||||||
|         TestImage.drop_collection() |         TestImage.drop_collection() | ||||||
|  |  | ||||||
|         t = TestImage() |         t = TestImage() | ||||||
|         t.image1.put(open(TEST_IMAGE_PATH, 'rb')) |         t.image1.put(get_file(TEST_IMAGE_PATH)) | ||||||
|         t.image2.put(open(TEST_IMAGE2_PATH, 'rb')) |         t.image2.put(get_file(TEST_IMAGE2_PATH)) | ||||||
|         t.save() |         t.save() | ||||||
|  |  | ||||||
|         test = TestImage.objects.first() |         test = TestImage.objects.first() | ||||||
| @@ -563,12 +573,10 @@ class FileTest(MongoDBTestCase): | |||||||
|         Animal.drop_collection() |         Animal.drop_collection() | ||||||
|         marmot = Animal(genus='Marmota', family='Sciuridae') |         marmot = Animal(genus='Marmota', family='Sciuridae') | ||||||
|  |  | ||||||
|         marmot_photo = open(TEST_IMAGE_PATH, 'rb')  # Retrieve a photo from disk |         with open(TEST_IMAGE_PATH, 'rb') as marmot_photo:   # Retrieve a photo from disk | ||||||
|  |             photos_field = marmot._fields['photos'].field | ||||||
|         photos_field = marmot._fields['photos'].field |             new_proxy = photos_field.get_proxy_obj('photos', marmot) | ||||||
|         new_proxy = photos_field.get_proxy_obj('photos', marmot) |             new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar') | ||||||
|         new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar') |  | ||||||
|         marmot_photo.close() |  | ||||||
|  |  | ||||||
|         marmot.photos.append(new_proxy) |         marmot.photos.append(new_proxy) | ||||||
|         marmot.save() |         marmot.save() | ||||||
| @@ -578,5 +586,6 @@ class FileTest(MongoDBTestCase): | |||||||
|         self.assertEqual(marmot.photos[0].foo, 'bar') |         self.assertEqual(marmot.photos[0].foo, 'bar') | ||||||
|         self.assertEqual(marmot.photos[0].get().length, 8313) |         self.assertEqual(marmot.photos[0].get().length, 8313) | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -40,6 +40,11 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             expected = "Both values (%s) in point must be float or int" % repr(coord) |             expected = "Both values (%s) in point must be float or int" % repr(coord) | ||||||
|             self._test_for_expected_error(Location, coord, expected) |             self._test_for_expected_error(Location, coord, expected) | ||||||
|  |  | ||||||
|  |         invalid_coords = [21, 4, 'a'] | ||||||
|  |         for coord in invalid_coords: | ||||||
|  |             expected = "GeoPointField can only accept tuples or lists of (x, y)" | ||||||
|  |             self._test_for_expected_error(Location, coord, expected) | ||||||
|  |  | ||||||
|     def test_point_validation(self): |     def test_point_validation(self): | ||||||
|         class Location(Document): |         class Location(Document): | ||||||
|             loc = PointField() |             loc = PointField() | ||||||
| @@ -298,9 +303,9 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             polygon = PolygonField() |             polygon = PolygonField() | ||||||
|  |  | ||||||
|         geo_indicies = Event._geo_indices() |         geo_indicies = Event._geo_indices() | ||||||
|         self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indicies) |         self.assertIn({'fields': [('line', '2dsphere')]}, geo_indicies) | ||||||
|         self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indicies) |         self.assertIn({'fields': [('polygon', '2dsphere')]}, geo_indicies) | ||||||
|         self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indicies) |         self.assertIn({'fields': [('point', '2dsphere')]}, geo_indicies) | ||||||
|  |  | ||||||
|     def test_indexes_2dsphere_embedded(self): |     def test_indexes_2dsphere_embedded(self): | ||||||
|         """Ensure that indexes are created automatically for GeoPointFields. |         """Ensure that indexes are created automatically for GeoPointFields. | ||||||
| @@ -316,9 +321,9 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|             venue = EmbeddedDocumentField(Venue) |             venue = EmbeddedDocumentField(Venue) | ||||||
|  |  | ||||||
|         geo_indicies = Event._geo_indices() |         geo_indicies = Event._geo_indices() | ||||||
|         self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indicies) |         self.assertIn({'fields': [('venue.line', '2dsphere')]}, geo_indicies) | ||||||
|         self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indicies) |         self.assertIn({'fields': [('venue.polygon', '2dsphere')]}, geo_indicies) | ||||||
|         self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indicies) |         self.assertIn({'fields': [('venue.point', '2dsphere')]}, geo_indicies) | ||||||
|  |  | ||||||
|     def test_geo_indexes_recursion(self): |     def test_geo_indexes_recursion(self): | ||||||
|  |  | ||||||
| @@ -335,9 +340,9 @@ class GeoFieldTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Parent(name='Berlin').save() |         Parent(name='Berlin').save() | ||||||
|         info = Parent._get_collection().index_information() |         info = Parent._get_collection().index_information() | ||||||
|         self.assertFalse('location_2d' in info) |         self.assertNotIn('location_2d', info) | ||||||
|         info = Location._get_collection().index_information() |         info = Location._get_collection().index_information() | ||||||
|         self.assertTrue('location_2d' in info) |         self.assertIn('location_2d', info) | ||||||
|  |  | ||||||
|         self.assertEqual(len(Parent._geo_indices()), 0) |         self.assertEqual(len(Parent._geo_indices()), 0) | ||||||
|         self.assertEqual(len(Location._geo_indices()), 1) |         self.assertEqual(len(Location._geo_indices()), 1) | ||||||
|   | |||||||
							
								
								
									
										143
									
								
								tests/fields/test_binary_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										143
									
								
								tests/fields/test_binary_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,143 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | import uuid | ||||||
|  |  | ||||||
|  | from nose.plugins.skip import SkipTest | ||||||
|  | import six | ||||||
|  |  | ||||||
|  | from bson import Binary | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  | BIN_VALUE = six.b('\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5') | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestBinaryField(MongoDBTestCase): | ||||||
|  |     def test_binary_fields(self): | ||||||
|  |         """Ensure that binary fields can be stored and retrieved. | ||||||
|  |         """ | ||||||
|  |         class Attachment(Document): | ||||||
|  |             content_type = StringField() | ||||||
|  |             blob = BinaryField() | ||||||
|  |  | ||||||
|  |         BLOB = six.b('\xe6\x00\xc4\xff\x07') | ||||||
|  |         MIME_TYPE = 'application/octet-stream' | ||||||
|  |  | ||||||
|  |         Attachment.drop_collection() | ||||||
|  |  | ||||||
|  |         attachment = Attachment(content_type=MIME_TYPE, blob=BLOB) | ||||||
|  |         attachment.save() | ||||||
|  |  | ||||||
|  |         attachment_1 = Attachment.objects().first() | ||||||
|  |         self.assertEqual(MIME_TYPE, attachment_1.content_type) | ||||||
|  |         self.assertEqual(BLOB, six.binary_type(attachment_1.blob)) | ||||||
|  |  | ||||||
|  |     def test_validation_succeeds(self): | ||||||
|  |         """Ensure that valid values can be assigned to binary fields. | ||||||
|  |         """ | ||||||
|  |         class AttachmentRequired(Document): | ||||||
|  |             blob = BinaryField(required=True) | ||||||
|  |  | ||||||
|  |         class AttachmentSizeLimit(Document): | ||||||
|  |             blob = BinaryField(max_bytes=4) | ||||||
|  |  | ||||||
|  |         attachment_required = AttachmentRequired() | ||||||
|  |         self.assertRaises(ValidationError, attachment_required.validate) | ||||||
|  |         attachment_required.blob = Binary(six.b('\xe6\x00\xc4\xff\x07')) | ||||||
|  |         attachment_required.validate() | ||||||
|  |  | ||||||
|  |         _5_BYTES = six.b('\xe6\x00\xc4\xff\x07') | ||||||
|  |         _4_BYTES = six.b('\xe6\x00\xc4\xff') | ||||||
|  |         self.assertRaises(ValidationError, AttachmentSizeLimit(blob=_5_BYTES).validate) | ||||||
|  |         AttachmentSizeLimit(blob=_4_BYTES).validate() | ||||||
|  |  | ||||||
|  |     def test_validation_fails(self): | ||||||
|  |         """Ensure that invalid values cannot be assigned to binary fields.""" | ||||||
|  |  | ||||||
|  |         class Attachment(Document): | ||||||
|  |             blob = BinaryField() | ||||||
|  |  | ||||||
|  |         for invalid_data in (2, u'Im_a_unicode', ['some_str']): | ||||||
|  |             self.assertRaises(ValidationError, Attachment(blob=invalid_data).validate) | ||||||
|  |  | ||||||
|  |     def test__primary(self): | ||||||
|  |         class Attachment(Document): | ||||||
|  |             id = BinaryField(primary_key=True) | ||||||
|  |  | ||||||
|  |         Attachment.drop_collection() | ||||||
|  |         binary_id = uuid.uuid4().bytes | ||||||
|  |         att = Attachment(id=binary_id).save() | ||||||
|  |         self.assertEqual(1, Attachment.objects.count()) | ||||||
|  |         self.assertEqual(1, Attachment.objects.filter(id=att.id).count()) | ||||||
|  |         att.delete() | ||||||
|  |         self.assertEqual(0, Attachment.objects.count()) | ||||||
|  |  | ||||||
|  |     def test_primary_filter_by_binary_pk_as_str(self): | ||||||
|  |         raise SkipTest("Querying by id as string is not currently supported") | ||||||
|  |  | ||||||
|  |         class Attachment(Document): | ||||||
|  |             id = BinaryField(primary_key=True) | ||||||
|  |  | ||||||
|  |         Attachment.drop_collection() | ||||||
|  |         binary_id = uuid.uuid4().bytes | ||||||
|  |         att = Attachment(id=binary_id).save() | ||||||
|  |         self.assertEqual(1, Attachment.objects.filter(id=binary_id).count()) | ||||||
|  |         att.delete() | ||||||
|  |         self.assertEqual(0, Attachment.objects.count()) | ||||||
|  |  | ||||||
|  |     def test_match_querying_with_bytes(self): | ||||||
|  |         class MyDocument(Document): | ||||||
|  |             bin_field = BinaryField() | ||||||
|  |  | ||||||
|  |         MyDocument.drop_collection() | ||||||
|  |  | ||||||
|  |         doc = MyDocument(bin_field=BIN_VALUE).save() | ||||||
|  |         matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first() | ||||||
|  |         self.assertEqual(matched_doc.id, doc.id) | ||||||
|  |  | ||||||
|  |     def test_match_querying_with_binary(self): | ||||||
|  |         class MyDocument(Document): | ||||||
|  |             bin_field = BinaryField() | ||||||
|  |  | ||||||
|  |         MyDocument.drop_collection() | ||||||
|  |  | ||||||
|  |         doc = MyDocument(bin_field=BIN_VALUE).save() | ||||||
|  |  | ||||||
|  |         matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first() | ||||||
|  |         self.assertEqual(matched_doc.id, doc.id) | ||||||
|  |  | ||||||
|  |     def test_modify_operation__set(self): | ||||||
|  |         """Ensures no regression of bug #1127""" | ||||||
|  |         class MyDocument(Document): | ||||||
|  |             some_field = StringField() | ||||||
|  |             bin_field = BinaryField() | ||||||
|  |  | ||||||
|  |         MyDocument.drop_collection() | ||||||
|  |  | ||||||
|  |         doc = MyDocument.objects(some_field='test').modify( | ||||||
|  |             upsert=True, new=True, | ||||||
|  |             set__bin_field=BIN_VALUE | ||||||
|  |         ) | ||||||
|  |         self.assertEqual(doc.some_field, 'test') | ||||||
|  |         if six.PY3: | ||||||
|  |             self.assertEqual(doc.bin_field, BIN_VALUE) | ||||||
|  |         else: | ||||||
|  |             self.assertEqual(doc.bin_field, Binary(BIN_VALUE)) | ||||||
|  |  | ||||||
|  |     def test_update_one(self): | ||||||
|  |         """Ensures no regression of bug #1127""" | ||||||
|  |         class MyDocument(Document): | ||||||
|  |             bin_field = BinaryField() | ||||||
|  |  | ||||||
|  |         MyDocument.drop_collection() | ||||||
|  |  | ||||||
|  |         bin_data = six.b('\xe6\x00\xc4\xff\x07') | ||||||
|  |         doc = MyDocument(bin_field=bin_data).save() | ||||||
|  |  | ||||||
|  |         n_updated = MyDocument.objects(bin_field=bin_data).update_one(bin_field=BIN_VALUE) | ||||||
|  |         self.assertEqual(n_updated, 1) | ||||||
|  |         fetched = MyDocument.objects.with_id(doc.id) | ||||||
|  |         if six.PY3: | ||||||
|  |             self.assertEqual(fetched.bin_field, BIN_VALUE) | ||||||
|  |         else: | ||||||
|  |             self.assertEqual(fetched.bin_field, Binary(BIN_VALUE)) | ||||||
							
								
								
									
										49
									
								
								tests/fields/test_boolean_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										49
									
								
								tests/fields/test_boolean_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,49 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase, get_as_pymongo | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestBooleanField(MongoDBTestCase): | ||||||
|  |     def test_storage(self): | ||||||
|  |         class Person(Document): | ||||||
|  |             admin = BooleanField() | ||||||
|  |  | ||||||
|  |         person = Person(admin=True) | ||||||
|  |         person.save() | ||||||
|  |         self.assertEqual( | ||||||
|  |             get_as_pymongo(person), | ||||||
|  |             {'_id': person.id, | ||||||
|  |              'admin': True}) | ||||||
|  |  | ||||||
|  |     def test_validation(self): | ||||||
|  |         """Ensure that invalid values cannot be assigned to boolean | ||||||
|  |         fields. | ||||||
|  |         """ | ||||||
|  |         class Person(Document): | ||||||
|  |             admin = BooleanField() | ||||||
|  |  | ||||||
|  |         person = Person() | ||||||
|  |         person.admin = True | ||||||
|  |         person.validate() | ||||||
|  |  | ||||||
|  |         person.admin = 2 | ||||||
|  |         self.assertRaises(ValidationError, person.validate) | ||||||
|  |         person.admin = 'Yes' | ||||||
|  |         self.assertRaises(ValidationError, person.validate) | ||||||
|  |         person.admin = 'False' | ||||||
|  |         self.assertRaises(ValidationError, person.validate) | ||||||
|  |  | ||||||
|  |     def test_weirdness_constructor(self): | ||||||
|  |         """When attribute is set in contructor, it gets cast into a bool | ||||||
|  |         which causes some weird behavior. We dont necessarily want to maintain this behavior | ||||||
|  |         but its a known issue | ||||||
|  |         """ | ||||||
|  |         class Person(Document): | ||||||
|  |             admin = BooleanField() | ||||||
|  |  | ||||||
|  |         new_person = Person(admin='False') | ||||||
|  |         self.assertTrue(new_person.admin) | ||||||
|  |  | ||||||
|  |         new_person = Person(admin='0') | ||||||
|  |         self.assertTrue(new_person.admin) | ||||||
							
								
								
									
										446
									
								
								tests/fields/test_cached_reference_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										446
									
								
								tests/fields/test_cached_reference_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,446 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | from decimal import Decimal | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestCachedReferenceField(MongoDBTestCase): | ||||||
|  |  | ||||||
|  |     def test_get_and_save(self): | ||||||
|  |         """ | ||||||
|  |         Tests #1047: CachedReferenceField creates DBRefs on to_python, | ||||||
|  |         but can't save them on to_mongo. | ||||||
|  |         """ | ||||||
|  |         class Animal(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             tag = StringField() | ||||||
|  |  | ||||||
|  |         class Ocorrence(Document): | ||||||
|  |             person = StringField() | ||||||
|  |             animal = CachedReferenceField(Animal) | ||||||
|  |  | ||||||
|  |         Animal.drop_collection() | ||||||
|  |         Ocorrence.drop_collection() | ||||||
|  |  | ||||||
|  |         Ocorrence(person="testte", | ||||||
|  |                   animal=Animal(name="Leopard", tag="heavy").save()).save() | ||||||
|  |         p = Ocorrence.objects.get() | ||||||
|  |         p.person = 'new_testte' | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |     def test_general_things(self): | ||||||
|  |         class Animal(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             tag = StringField() | ||||||
|  |  | ||||||
|  |         class Ocorrence(Document): | ||||||
|  |             person = StringField() | ||||||
|  |             animal = CachedReferenceField( | ||||||
|  |                 Animal, fields=['tag']) | ||||||
|  |  | ||||||
|  |         Animal.drop_collection() | ||||||
|  |         Ocorrence.drop_collection() | ||||||
|  |  | ||||||
|  |         a = Animal(name="Leopard", tag="heavy") | ||||||
|  |         a.save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(Animal._cached_reference_fields, [Ocorrence.animal]) | ||||||
|  |         o = Ocorrence(person="teste", animal=a) | ||||||
|  |         o.save() | ||||||
|  |  | ||||||
|  |         p = Ocorrence(person="Wilson") | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(Ocorrence.objects(animal=None).count(), 1) | ||||||
|  |  | ||||||
|  |         self.assertEqual( | ||||||
|  |             a.to_mongo(fields=['tag']), {'tag': 'heavy', "_id": a.pk}) | ||||||
|  |  | ||||||
|  |         self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') | ||||||
|  |  | ||||||
|  |         # counts | ||||||
|  |         Ocorrence(person="teste 2").save() | ||||||
|  |         Ocorrence(person="teste 3").save() | ||||||
|  |  | ||||||
|  |         count = Ocorrence.objects(animal__tag='heavy').count() | ||||||
|  |         self.assertEqual(count, 1) | ||||||
|  |  | ||||||
|  |         ocorrence = Ocorrence.objects(animal__tag='heavy').first() | ||||||
|  |         self.assertEqual(ocorrence.person, "teste") | ||||||
|  |         self.assertIsInstance(ocorrence.animal, Animal) | ||||||
|  |  | ||||||
|  |     def test_with_decimal(self): | ||||||
|  |         class PersonAuto(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             salary = DecimalField() | ||||||
|  |  | ||||||
|  |         class SocialTest(Document): | ||||||
|  |             group = StringField() | ||||||
|  |             person = CachedReferenceField( | ||||||
|  |                 PersonAuto, | ||||||
|  |                 fields=('salary',)) | ||||||
|  |  | ||||||
|  |         PersonAuto.drop_collection() | ||||||
|  |         SocialTest.drop_collection() | ||||||
|  |  | ||||||
|  |         p = PersonAuto(name="Alberto", salary=Decimal('7000.00')) | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         s = SocialTest(group="dev", person=p) | ||||||
|  |         s.save() | ||||||
|  |  | ||||||
|  |         self.assertEqual( | ||||||
|  |             SocialTest.objects._collection.find_one({'person.salary': 7000.00}), { | ||||||
|  |                 '_id': s.pk, | ||||||
|  |                 'group': s.group, | ||||||
|  |                 'person': { | ||||||
|  |                     '_id': p.pk, | ||||||
|  |                     'salary': 7000.00 | ||||||
|  |                 } | ||||||
|  |             }) | ||||||
|  |  | ||||||
|  |     def test_cached_reference_field_reference(self): | ||||||
|  |         class Group(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             group = ReferenceField(Group) | ||||||
|  |  | ||||||
|  |         class SocialData(Document): | ||||||
|  |             obs = StringField() | ||||||
|  |             tags = ListField( | ||||||
|  |                 StringField()) | ||||||
|  |             person = CachedReferenceField( | ||||||
|  |                 Person, | ||||||
|  |                 fields=('group',)) | ||||||
|  |  | ||||||
|  |         Group.drop_collection() | ||||||
|  |         Person.drop_collection() | ||||||
|  |         SocialData.drop_collection() | ||||||
|  |  | ||||||
|  |         g1 = Group(name='dev') | ||||||
|  |         g1.save() | ||||||
|  |  | ||||||
|  |         g2 = Group(name="designers") | ||||||
|  |         g2.save() | ||||||
|  |  | ||||||
|  |         p1 = Person(name="Alberto", group=g1) | ||||||
|  |         p1.save() | ||||||
|  |  | ||||||
|  |         p2 = Person(name="Andre", group=g1) | ||||||
|  |         p2.save() | ||||||
|  |  | ||||||
|  |         p3 = Person(name="Afro design", group=g2) | ||||||
|  |         p3.save() | ||||||
|  |  | ||||||
|  |         s1 = SocialData(obs="testing 123", person=p1, tags=['tag1', 'tag2']) | ||||||
|  |         s1.save() | ||||||
|  |  | ||||||
|  |         s2 = SocialData(obs="testing 321", person=p3, tags=['tag3', 'tag4']) | ||||||
|  |         s2.save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(SocialData.objects._collection.find_one( | ||||||
|  |             {'tags': 'tag2'}), { | ||||||
|  |                 '_id': s1.pk, | ||||||
|  |                 'obs': 'testing 123', | ||||||
|  |                 'tags': ['tag1', 'tag2'], | ||||||
|  |                 'person': { | ||||||
|  |                     '_id': p1.pk, | ||||||
|  |                     'group': g1.pk | ||||||
|  |                 } | ||||||
|  |         }) | ||||||
|  |  | ||||||
|  |         self.assertEqual(SocialData.objects(person__group=g2).count(), 1) | ||||||
|  |         self.assertEqual(SocialData.objects(person__group=g2).first(), s2) | ||||||
|  |  | ||||||
|  |     def test_cached_reference_field_push_with_fields(self): | ||||||
|  |         class Product(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         Product.drop_collection() | ||||||
|  |  | ||||||
|  |         class Basket(Document): | ||||||
|  |             products = ListField(CachedReferenceField(Product, fields=['name'])) | ||||||
|  |  | ||||||
|  |         Basket.drop_collection() | ||||||
|  |         product1 = Product(name='abc').save() | ||||||
|  |         product2 = Product(name='def').save() | ||||||
|  |         basket = Basket(products=[product1]).save() | ||||||
|  |         self.assertEqual( | ||||||
|  |             Basket.objects._collection.find_one(), | ||||||
|  |             { | ||||||
|  |                 '_id': basket.pk, | ||||||
|  |                 'products': [ | ||||||
|  |                     { | ||||||
|  |                         '_id': product1.pk, | ||||||
|  |                         'name': product1.name | ||||||
|  |                     } | ||||||
|  |                 ] | ||||||
|  |             } | ||||||
|  |         ) | ||||||
|  |         # push to list | ||||||
|  |         basket.update(push__products=product2) | ||||||
|  |         basket.reload() | ||||||
|  |         self.assertEqual( | ||||||
|  |             Basket.objects._collection.find_one(), | ||||||
|  |             { | ||||||
|  |                 '_id': basket.pk, | ||||||
|  |                 'products': [ | ||||||
|  |                     { | ||||||
|  |                         '_id': product1.pk, | ||||||
|  |                         'name': product1.name | ||||||
|  |                     }, | ||||||
|  |                     { | ||||||
|  |                         '_id': product2.pk, | ||||||
|  |                         'name': product2.name | ||||||
|  |                     } | ||||||
|  |                 ] | ||||||
|  |             } | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |     def test_cached_reference_field_update_all(self): | ||||||
|  |         class Person(Document): | ||||||
|  |             TYPES = ( | ||||||
|  |                 ('pf', "PF"), | ||||||
|  |                 ('pj', "PJ") | ||||||
|  |             ) | ||||||
|  |             name = StringField() | ||||||
|  |             tp = StringField(choices=TYPES) | ||||||
|  |             father = CachedReferenceField('self', fields=('tp',)) | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         a1 = Person(name="Wilson Father", tp="pj") | ||||||
|  |         a1.save() | ||||||
|  |  | ||||||
|  |         a2 = Person(name='Wilson Junior', tp='pf', father=a1) | ||||||
|  |         a2.save() | ||||||
|  |  | ||||||
|  |         a2 = Person.objects.with_id(a2.id) | ||||||
|  |         self.assertEqual(a2.father.tp, a1.tp) | ||||||
|  |  | ||||||
|  |         self.assertEqual(dict(a2.to_mongo()), { | ||||||
|  |             "_id": a2.pk, | ||||||
|  |             "name": u"Wilson Junior", | ||||||
|  |             "tp": u"pf", | ||||||
|  |             "father": { | ||||||
|  |                 "_id": a1.pk, | ||||||
|  |                 "tp": u"pj" | ||||||
|  |             } | ||||||
|  |         }) | ||||||
|  |  | ||||||
|  |         self.assertEqual(Person.objects(father=a1)._query, { | ||||||
|  |             'father._id': a1.pk | ||||||
|  |         }) | ||||||
|  |         self.assertEqual(Person.objects(father=a1).count(), 1) | ||||||
|  |  | ||||||
|  |         Person.objects.update(set__tp="pf") | ||||||
|  |         Person.father.sync_all() | ||||||
|  |  | ||||||
|  |         a2.reload() | ||||||
|  |         self.assertEqual(dict(a2.to_mongo()), { | ||||||
|  |             "_id": a2.pk, | ||||||
|  |             "name": u"Wilson Junior", | ||||||
|  |             "tp": u"pf", | ||||||
|  |             "father": { | ||||||
|  |                 "_id": a1.pk, | ||||||
|  |                 "tp": u"pf" | ||||||
|  |             } | ||||||
|  |         }) | ||||||
|  |  | ||||||
|  |     def test_cached_reference_fields_on_embedded_documents(self): | ||||||
|  |         with self.assertRaises(InvalidDocumentError): | ||||||
|  |             class Test(Document): | ||||||
|  |                 name = StringField() | ||||||
|  |  | ||||||
|  |             type('WrongEmbeddedDocument', ( | ||||||
|  |                 EmbeddedDocument,), { | ||||||
|  |                     'test': CachedReferenceField(Test) | ||||||
|  |             }) | ||||||
|  |  | ||||||
|  |     def test_cached_reference_auto_sync(self): | ||||||
|  |         class Person(Document): | ||||||
|  |             TYPES = ( | ||||||
|  |                 ('pf', "PF"), | ||||||
|  |                 ('pj', "PJ") | ||||||
|  |             ) | ||||||
|  |             name = StringField() | ||||||
|  |             tp = StringField( | ||||||
|  |                 choices=TYPES | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |             father = CachedReferenceField('self', fields=('tp',)) | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         a1 = Person(name="Wilson Father", tp="pj") | ||||||
|  |         a1.save() | ||||||
|  |  | ||||||
|  |         a2 = Person(name='Wilson Junior', tp='pf', father=a1) | ||||||
|  |         a2.save() | ||||||
|  |  | ||||||
|  |         a1.tp = 'pf' | ||||||
|  |         a1.save() | ||||||
|  |  | ||||||
|  |         a2.reload() | ||||||
|  |         self.assertEqual(dict(a2.to_mongo()), { | ||||||
|  |             '_id': a2.pk, | ||||||
|  |             'name': 'Wilson Junior', | ||||||
|  |             'tp': 'pf', | ||||||
|  |             'father': { | ||||||
|  |                 '_id': a1.pk, | ||||||
|  |                 'tp': 'pf' | ||||||
|  |             } | ||||||
|  |         }) | ||||||
|  |  | ||||||
|  |     def test_cached_reference_auto_sync_disabled(self): | ||||||
|  |         class Persone(Document): | ||||||
|  |             TYPES = ( | ||||||
|  |                 ('pf', "PF"), | ||||||
|  |                 ('pj', "PJ") | ||||||
|  |             ) | ||||||
|  |             name = StringField() | ||||||
|  |             tp = StringField( | ||||||
|  |                 choices=TYPES | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |             father = CachedReferenceField( | ||||||
|  |                 'self', fields=('tp',), auto_sync=False) | ||||||
|  |  | ||||||
|  |         Persone.drop_collection() | ||||||
|  |  | ||||||
|  |         a1 = Persone(name="Wilson Father", tp="pj") | ||||||
|  |         a1.save() | ||||||
|  |  | ||||||
|  |         a2 = Persone(name='Wilson Junior', tp='pf', father=a1) | ||||||
|  |         a2.save() | ||||||
|  |  | ||||||
|  |         a1.tp = 'pf' | ||||||
|  |         a1.save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(Persone.objects._collection.find_one({'_id': a2.pk}), { | ||||||
|  |             '_id': a2.pk, | ||||||
|  |             'name': 'Wilson Junior', | ||||||
|  |             'tp': 'pf', | ||||||
|  |             'father': { | ||||||
|  |                 '_id': a1.pk, | ||||||
|  |                 'tp': 'pj' | ||||||
|  |             } | ||||||
|  |         }) | ||||||
|  |  | ||||||
|  |     def test_cached_reference_embedded_fields(self): | ||||||
|  |         class Owner(EmbeddedDocument): | ||||||
|  |             TPS = ( | ||||||
|  |                 ('n', "Normal"), | ||||||
|  |                 ('u', "Urgent") | ||||||
|  |             ) | ||||||
|  |             name = StringField() | ||||||
|  |             tp = StringField( | ||||||
|  |                 verbose_name="Type", | ||||||
|  |                 db_field="t", | ||||||
|  |                 choices=TPS) | ||||||
|  |  | ||||||
|  |         class Animal(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             tag = StringField() | ||||||
|  |  | ||||||
|  |             owner = EmbeddedDocumentField(Owner) | ||||||
|  |  | ||||||
|  |         class Ocorrence(Document): | ||||||
|  |             person = StringField() | ||||||
|  |             animal = CachedReferenceField( | ||||||
|  |                 Animal, fields=['tag', 'owner.tp']) | ||||||
|  |  | ||||||
|  |         Animal.drop_collection() | ||||||
|  |         Ocorrence.drop_collection() | ||||||
|  |  | ||||||
|  |         a = Animal(name="Leopard", tag="heavy", | ||||||
|  |                    owner=Owner(tp='u', name="Wilson Júnior") | ||||||
|  |                    ) | ||||||
|  |         a.save() | ||||||
|  |  | ||||||
|  |         o = Ocorrence(person="teste", animal=a) | ||||||
|  |         o.save() | ||||||
|  |         self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tp'])), { | ||||||
|  |             '_id': a.pk, | ||||||
|  |             'tag': 'heavy', | ||||||
|  |             'owner': { | ||||||
|  |                 't': 'u' | ||||||
|  |             } | ||||||
|  |         }) | ||||||
|  |         self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') | ||||||
|  |         self.assertEqual(o.to_mongo()['animal']['owner']['t'], 'u') | ||||||
|  |  | ||||||
|  |         # Check to_mongo with fields | ||||||
|  |         self.assertNotIn('animal', o.to_mongo(fields=['person'])) | ||||||
|  |  | ||||||
|  |         # counts | ||||||
|  |         Ocorrence(person="teste 2").save() | ||||||
|  |         Ocorrence(person="teste 3").save() | ||||||
|  |  | ||||||
|  |         count = Ocorrence.objects( | ||||||
|  |             animal__tag='heavy', animal__owner__tp='u').count() | ||||||
|  |         self.assertEqual(count, 1) | ||||||
|  |  | ||||||
|  |         ocorrence = Ocorrence.objects( | ||||||
|  |             animal__tag='heavy', | ||||||
|  |             animal__owner__tp='u').first() | ||||||
|  |         self.assertEqual(ocorrence.person, "teste") | ||||||
|  |         self.assertIsInstance(ocorrence.animal, Animal) | ||||||
|  |  | ||||||
|  |     def test_cached_reference_embedded_list_fields(self): | ||||||
|  |         class Owner(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  |             tags = ListField(StringField()) | ||||||
|  |  | ||||||
|  |         class Animal(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             tag = StringField() | ||||||
|  |  | ||||||
|  |             owner = EmbeddedDocumentField(Owner) | ||||||
|  |  | ||||||
|  |         class Ocorrence(Document): | ||||||
|  |             person = StringField() | ||||||
|  |             animal = CachedReferenceField( | ||||||
|  |                 Animal, fields=['tag', 'owner.tags']) | ||||||
|  |  | ||||||
|  |         Animal.drop_collection() | ||||||
|  |         Ocorrence.drop_collection() | ||||||
|  |  | ||||||
|  |         a = Animal(name="Leopard", tag="heavy", | ||||||
|  |                    owner=Owner(tags=['cool', 'funny'], | ||||||
|  |                                name="Wilson Júnior") | ||||||
|  |                    ) | ||||||
|  |         a.save() | ||||||
|  |  | ||||||
|  |         o = Ocorrence(person="teste 2", animal=a) | ||||||
|  |         o.save() | ||||||
|  |         self.assertEqual(dict(a.to_mongo(fields=['tag', 'owner.tags'])), { | ||||||
|  |             '_id': a.pk, | ||||||
|  |             'tag': 'heavy', | ||||||
|  |             'owner': { | ||||||
|  |                 'tags': ['cool', 'funny'] | ||||||
|  |             } | ||||||
|  |         }) | ||||||
|  |  | ||||||
|  |         self.assertEqual(o.to_mongo()['animal']['tag'], 'heavy') | ||||||
|  |         self.assertEqual(o.to_mongo()['animal']['owner']['tags'], | ||||||
|  |                          ['cool', 'funny']) | ||||||
|  |  | ||||||
|  |         # counts | ||||||
|  |         Ocorrence(person="teste 2").save() | ||||||
|  |         Ocorrence(person="teste 3").save() | ||||||
|  |  | ||||||
|  |         query = Ocorrence.objects( | ||||||
|  |             animal__tag='heavy', animal__owner__tags='cool')._query | ||||||
|  |         self.assertEqual( | ||||||
|  |             query, {'animal.owner.tags': 'cool', 'animal.tag': 'heavy'}) | ||||||
|  |  | ||||||
|  |         ocorrence = Ocorrence.objects( | ||||||
|  |             animal__tag='heavy', | ||||||
|  |             animal__owner__tags='cool').first() | ||||||
|  |         self.assertEqual(ocorrence.person, "teste 2") | ||||||
|  |         self.assertIsInstance(ocorrence.animal, Animal) | ||||||
							
								
								
									
										184
									
								
								tests/fields/test_complex_datetime_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										184
									
								
								tests/fields/test_complex_datetime_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,184 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | import datetime | ||||||
|  | import math | ||||||
|  | import itertools | ||||||
|  | import re | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class ComplexDateTimeFieldTest(MongoDBTestCase):
    """Tests for ComplexDateTimeField: string-backed datetimes that keep
    full microsecond precision (unlike BSON's millisecond datetimes).
    """

    def test_complexdatetime_storage(self):
        """Tests for complex datetime fields - which can handle
        microseconds without rounding.
        """
        class LogEntry(Document):
            date = ComplexDateTimeField()
            date_with_dots = ComplexDateTimeField(separator='.')

        LogEntry.drop_collection()

        # Post UTC - microseconds are rounded (down) nearest millisecond and
        # dropped - with default datetimefields
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1)

        # Post UTC - microseconds are rounded (down) nearest millisecond - with
        # default datetimefields
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1)

        # Pre UTC dates microseconds below 1000 are dropped - with default
        # datetimefields
        d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1)

        # Pre UTC microseconds above 1000 is wonky - with default datetimefields
        # log.date has an invalid microsecond value so I can't construct
        # a date to compare.
        for i in range(1001, 3113, 33):
            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
            log.date = d1
            log.save()
            log.reload()
            self.assertEqual(log.date, d1)
            log1 = LogEntry.objects.get(date=d1)
            self.assertEqual(log, log1)

        # Test string padding: every component must be stored zero-padded to
        # a fixed width so that lexicographic order equals chronological order.
        microsecond = map(int, [math.pow(10, x) for x in range(6)])
        mm = dd = hh = ii = ss = [1, 10]

        # NOTE: raw strings are required here - '\d' inside a plain string
        # literal is an invalid escape sequence (DeprecationWarning on
        # Python 3.6+, a SyntaxError in future versions).
        for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond):
            stored = LogEntry(date=datetime.datetime(*values)).to_mongo()['date']
            self.assertIsNotNone(
                re.match(r'^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$', stored))

        # Test separator - the dots must be escaped, otherwise '.' matches
        # any character and the pattern would accept any separator.
        stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()['date_with_dots']
        self.assertIsNotNone(
            re.match(r'^\d{4}\.\d{2}\.\d{2}\.\d{2}\.\d{2}\.\d{2}\.\d{6}$', stored))

    def test_complexdatetime_usage(self):
        """Tests for complex datetime fields - which can handle
        microseconds without rounding.
        """
        class LogEntry(Document):
            date = ComplexDateTimeField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1950, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()

        log1 = LogEntry.objects.get(date=d1)
        self.assertEqual(log, log1)

        # create extra 59 log entries for a total of 60
        for i in range(1951, 2010):
            d = datetime.datetime(i, 1, 1, 0, 0, 1, 999)
            LogEntry(date=d).save()

        self.assertEqual(LogEntry.objects.count(), 60)

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 59:
            self.assertTrue(logs[i].date <= logs[i + 1].date)
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 59:
            self.assertTrue(logs[i].date >= logs[i + 1].date)
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 30)

        logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 30)

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2011, 1, 1),
            date__gte=datetime.datetime(2000, 1, 1),
        )
        self.assertEqual(logs.count(), 10)

        LogEntry.drop_collection()

        # Test microsecond-level ordering/filtering
        for microsecond in (99, 999, 9999, 10000):
            LogEntry(
                date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)
            ).save()

        logs = list(LogEntry.objects.order_by('date'))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            self.assertTrue(log.date < next_log.date)

        logs = list(LogEntry.objects.order_by('-date'))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            self.assertTrue(log.date > next_log.date)

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000))
        self.assertEqual(logs.count(), 4)

    def test_no_default_value(self):
        """A field without a default stays None through save and fetch."""
        class Log(Document):
            timestamp = ComplexDateTimeField()

        Log.drop_collection()

        log = Log()
        self.assertIsNone(log.timestamp)
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        self.assertIsNone(fetched_log.timestamp)

    def test_default_static_value(self):
        """A static default value round-trips unchanged through the DB."""
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=NOW)

        Log.drop_collection()

        log = Log()
        self.assertEqual(log.timestamp, NOW)
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        self.assertEqual(fetched_log.timestamp, NOW)

    def test_default_callable(self):
        """A callable default is evaluated at document creation time."""
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=datetime.datetime.utcnow)

        Log.drop_collection()

        log = Log()
        self.assertGreaterEqual(log.timestamp, NOW)
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        self.assertGreaterEqual(fetched_log.timestamp, NOW)
							
								
								
									
										165
									
								
								tests/fields/test_date_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										165
									
								
								tests/fields/test_date_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,165 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | import datetime | ||||||
|  | import six | ||||||
|  |  | ||||||
|  | try: | ||||||
|  |     import dateutil | ||||||
|  | except ImportError: | ||||||
|  |     dateutil = None | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestDateField(MongoDBTestCase):
    """Tests for DateField: casting, defaults, truncation of time parts,
    querying, ordering and validation.
    """

    def test_date_from_empty_string(self):
        """
        Ensure an exception is raised when trying to
        cast an empty string to datetime.
        """
        class MyDoc(Document):
            dt = DateField()

        md = MyDoc(dt='')
        self.assertRaises(ValidationError, md.save)

    def test_date_from_whitespace_string(self):
        """
        Ensure an exception is raised when trying to
        cast a whitespace-only string to datetime.
        """
        class MyDoc(Document):
            dt = DateField()

        md = MyDoc(dt='   ')
        self.assertRaises(ValidationError, md.save)

    def test_default_values_today(self):
        """Ensure that default field values are used when creating
        a document.
        """
        class Person(Document):
            day = DateField(default=datetime.date.today)

        person = Person()
        person.validate()
        # Capture the first read so the next assertion actually verifies the
        # default is stable across accesses (the previous version compared
        # person.day with itself, which was vacuously true).
        day_t0 = person.day
        self.assertEqual(person.day, day_t0)
        self.assertEqual(person.day, datetime.date.today())
        self.assertEqual(person._data['day'], person.day)

    def test_date(self):
        """Tests showing pymongo date fields

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """
        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        # Test can save dates
        log = LogEntry()
        log.date = datetime.date.today()
        log.save()
        log.reload()
        self.assertEqual(log.date, datetime.date.today())

        # Assigning a datetime stores only the date part.
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        d2 = datetime.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1.date())
        self.assertEqual(log.date, d2.date())

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        d2 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9000)
        log.date = d1
        log.save()
        log.reload()
        self.assertEqual(log.date, d1.date())
        self.assertEqual(log.date, d2.date())

        if not six.PY3:
            # Pre UTC dates microseconds below 1000 are dropped
            # This does not seem to be true in PY3
            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
            d2 = datetime.datetime(1969, 12, 31, 23, 59, 59)
            log.date = d1
            log.save()
            log.reload()
            self.assertEqual(log.date, d1.date())
            self.assertEqual(log.date, d2.date())

    def test_regular_usage(self):
        """Tests for regular datetime fields"""
        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.validate()
        log.save()

        # Both datetime objects and ISO-formatted strings are queryable.
        for query in (d1, d1.isoformat(' ')):
            log1 = LogEntry.objects.get(date=query)
            self.assertEqual(log, log1)

        if dateutil:
            # 'T'-separated ISO strings only parse when dateutil is installed.
            log1 = LogEntry.objects.get(date=d1.isoformat('T'))
            self.assertEqual(log, log1)

        # create additional 19 log entries for a total of 20
        for i in range(1971, 1990):
            d = datetime.datetime(i, 1, 1, 0, 0, 1)
            LogEntry(date=d).save()

        self.assertEqual(LogEntry.objects.count(), 20)

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 19:
            self.assertTrue(logs[i].date <= logs[i + 1].date)
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 19:
            self.assertTrue(logs[i].date >= logs[i + 1].date)
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 10)

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to datetime
        fields.
        """
        class LogEntry(Document):
            time = DateField()

        log = LogEntry()
        log.time = datetime.datetime.now()
        log.validate()

        log.time = datetime.date.today()
        log.validate()

        log.time = datetime.datetime.now().isoformat(' ')
        log.validate()

        if dateutil:
            log.time = datetime.datetime.now().isoformat('T')
            log.validate()

        # Non-date values must fail validation.
        log.time = -1
        self.assertRaises(ValidationError, log.validate)
        log.time = 'ABC'
        self.assertRaises(ValidationError, log.validate)
							
								
								
									
										231
									
								
								tests/fields/test_datetime_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										231
									
								
								tests/fields/test_datetime_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,231 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | import datetime as dt | ||||||
|  | import six | ||||||
|  |  | ||||||
|  | try: | ||||||
|  |     import dateutil | ||||||
|  | except ImportError: | ||||||
|  |     dateutil = None | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine import connection | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestDateTimeField(MongoDBTestCase):
    """Tests for DateTimeField: string casting, callable defaults,
    BSON microsecond truncation, querying, ordering and validation.
    """

    def test_datetime_from_empty_string(self):
        """
        Ensure an exception is raised when trying to
        cast an empty string to datetime.
        """
        class MyDoc(Document):
            dt = DateTimeField()

        md = MyDoc(dt='')
        self.assertRaises(ValidationError, md.save)

    def test_datetime_from_whitespace_string(self):
        """
        Ensure an exception is raised when trying to
        cast a whitespace-only string to datetime.
        """
        class MyDoc(Document):
            dt = DateTimeField()

        md = MyDoc(dt='   ')
        self.assertRaises(ValidationError, md.save)

    def test_default_value_utcnow(self):
        """Ensure that default field values are used when creating
        a document.
        """
        class Person(Document):
            created = DateTimeField(default=dt.datetime.utcnow)

        utcnow = dt.datetime.utcnow()
        person = Person()
        person.validate()
        person_created_t0 = person.created
        # The callable default must have been evaluated within a second of
        # the reference timestamp captured just before instantiation.
        self.assertLess(person.created - utcnow, dt.timedelta(seconds=1))
        self.assertEqual(person_created_t0, person.created)  # make sure it does not change
        self.assertEqual(person._data['created'], person.created)

    def test_handling_microseconds(self):
        """Tests showing pymongo datetime fields handling of microseconds.
        Microseconds are rounded to the nearest millisecond and pre UTC
        handling is wonky.

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """
        class LogEntry(Document):
            date = DateTimeField()

        LogEntry.drop_collection()

        # Test can save dates
        log = LogEntry()
        log.date = dt.date.today()
        log.save()
        log.reload()
        self.assertEqual(log.date.date(), dt.date.today())

        # Post UTC - microseconds are rounded (down) nearest millisecond and
        # dropped
        d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 999)
        d2 = dt.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        # The sub-millisecond part (999us) is lost in the round-trip, so the
        # reloaded value equals the truncated d2, not the original d1.
        self.assertNotEqual(log.date, d1)
        self.assertEqual(log.date, d2)

        # Post UTC - microseconds are rounded (down) nearest millisecond
        d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 9999)
        d2 = dt.datetime(1970, 1, 1, 0, 0, 1, 9000)
        log.date = d1
        log.save()
        log.reload()
        self.assertNotEqual(log.date, d1)
        self.assertEqual(log.date, d2)

        if not six.PY3:
            # Pre UTC dates microseconds below 1000 are dropped
            # This does not seem to be true in PY3
            d1 = dt.datetime(1969, 12, 31, 23, 59, 59, 999)
            d2 = dt.datetime(1969, 12, 31, 23, 59, 59)
            log.date = d1
            log.save()
            log.reload()
            self.assertNotEqual(log.date, d1)
            self.assertEqual(log.date, d2)

    def test_regular_usage(self):
        """Tests for regular datetime fields"""
        class LogEntry(Document):
            date = DateTimeField()

        LogEntry.drop_collection()

        d1 = dt.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.validate()
        log.save()

        # Both datetime objects and space-separated ISO strings match.
        for query in (d1, d1.isoformat(' ')):
            log1 = LogEntry.objects.get(date=query)
            self.assertEqual(log, log1)

        if dateutil:
            # 'T'-separated ISO strings are only parsed via dateutil.
            log1 = LogEntry.objects.get(date=d1.isoformat('T'))
            self.assertEqual(log, log1)

        # create additional 19 log entries for a total of 20
        for i in range(1971, 1990):
            d = dt.datetime(i, 1, 1, 0, 0, 1)
            LogEntry(date=d).save()

        self.assertEqual(LogEntry.objects.count(), 20)

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 19:
            self.assertTrue(logs[i].date <= logs[i + 1].date)
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 19:
            self.assertTrue(logs[i].date >= logs[i + 1].date)
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=dt.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 10)

        logs = LogEntry.objects.filter(date__lte=dt.datetime(1980, 1, 1))
        self.assertEqual(logs.count(), 10)

        logs = LogEntry.objects.filter(
            date__lte=dt.datetime(1980, 1, 1),
            date__gte=dt.datetime(1975, 1, 1),
        )
        self.assertEqual(logs.count(), 5)

    def test_datetime_validation(self):
        """Ensure that invalid values cannot be assigned to datetime
        fields.
        """
        class LogEntry(Document):
            time = DateTimeField()

        log = LogEntry()
        log.time = dt.datetime.now()
        log.validate()

        log.time = dt.date.today()
        log.validate()

        log.time = dt.datetime.now().isoformat(' ')
        log.validate()

        log.time = '2019-05-16 21:42:57.897847'
        log.validate()

        if dateutil:
            log.time = dt.datetime.now().isoformat('T')
            log.validate()

        # Non-datetime values and malformed strings must all fail validation.
        log.time = -1
        self.assertRaises(ValidationError, log.validate)
        log.time = 'ABC'
        self.assertRaises(ValidationError, log.validate)
        log.time = '2019-05-16 21:GARBAGE:12'
        self.assertRaises(ValidationError, log.validate)
        log.time = '2019-05-16 21:42:57.GARBAGE'
        self.assertRaises(ValidationError, log.validate)
        log.time = '2019-05-16 21:42:57.123.456'
        self.assertRaises(ValidationError, log.validate)

    def test_parse_datetime_as_str(self):
        """A parsable datetime string is kept as a string on assignment and
        only becomes a datetime after a save/reload round-trip.
        """
        class DTDoc(Document):
            date = DateTimeField()

        date_str = '2019-03-02 22:26:01'

        # make sure that passing a parsable datetime works
        dtd = DTDoc()
        dtd.date = date_str
        self.assertIsInstance(dtd.date, six.string_types)
        dtd.save()
        dtd.reload()

        self.assertIsInstance(dtd.date, dt.datetime)
        self.assertEqual(str(dtd.date), date_str)

        dtd.date = 'January 1st, 9999999999'
        self.assertRaises(ValidationError, dtd.validate)
|  |  | ||||||
|  |  | ||||||
class TestDateTimeTzAware(MongoDBTestCase):
    def test_datetime_tz_aware_mark_as_changed(self):
        """Assigning a naive datetime equal to the stored value (modulo tz)
        on a tz-aware connection must still mark the field as changed."""
        # Reset the module-level connection caches so we can reconnect
        # with tz_aware=True.
        for cache_attr in ('_connection_settings', '_connections', '_dbs'):
            setattr(connection, cache_attr, {})

        connect(db='mongoenginetest', tz_aware=True)

        class LogEntry(Document):
            time = DateTimeField()

        LogEntry.drop_collection()

        LogEntry(time=dt.datetime(2013, 1, 1, 0, 0, 0)).save()

        entry = LogEntry.objects.first()
        entry.time = dt.datetime(2013, 1, 1, 0, 0, 0)
        self.assertEqual(['time'], entry._changed_fields)
							
								
								
									
										91
									
								
								tests/fields/test_decimal_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										91
									
								
								tests/fields/test_decimal_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,91 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | from decimal import Decimal | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestDecimalField(MongoDBTestCase):

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to decimal fields.
        """
        class Person(Document):
            height = DecimalField(min_value=Decimal('0.1'),
                                  max_value=Decimal('3.5'))

        Person.drop_collection()

        Person(height=Decimal('1.89')).save()
        person = Person.objects.first()
        self.assertEqual(person.height, Decimal('1.89'))

        # A numeric string inside the allowed range is accepted.
        person.height = '2.0'
        person.save()

        # Below the minimum, above the maximum, or not a number at all.
        for bad_height in (0.01, Decimal('0.01'), Decimal('4.0'),
                           'something invalid'):
            person.height = bad_height
            self.assertRaises(ValidationError, person.validate)

        person_2 = Person(height='something invalid')
        self.assertRaises(ValidationError, person_2.validate)

    def test_comparison(self):
        """Comparison operators accept Decimal, int and numeric strings."""
        class Person(Document):
            money = DecimalField()

        Person.drop_collection()

        for amount in (6, 7, 8, 10):
            Person(money=amount).save()

        self.assertEqual(2, Person.objects(money__gt=Decimal("7")).count())
        self.assertEqual(2, Person.objects(money__gt=7).count())
        self.assertEqual(2, Person.objects(money__gt="7").count())

        self.assertEqual(3, Person.objects(money__gte="7").count())

    def test_storage(self):
        """Values are rounded to `precision` and stored as floats, or as
        strings when force_string=True."""
        class Person(Document):
            float_value = DecimalField(precision=4)
            string_value = DecimalField(precision=4, force_string=True)

        Person.drop_collection()

        values_to_store = [10, 10.1, 10.11, "10.111", Decimal("10.1111"),
                           Decimal("10.11111")]
        for store_at_creation in [True, False]:
            for value in values_to_store:
                # to_python is called explicitly if values were sent in the
                # kwargs of __init__
                if store_at_creation:
                    Person(float_value=value, string_value=value).save()
                else:
                    person = Person.objects.create()
                    person.float_value = value
                    person.string_value = value
                    person.save()

        # How its stored
        expected = [
            {'float_value': 10.0, 'string_value': '10.0000'},
            {'float_value': 10.1, 'string_value': '10.1000'},
            {'float_value': 10.11, 'string_value': '10.1100'},
            {'float_value': 10.111, 'string_value': '10.1110'},
            {'float_value': 10.1111, 'string_value': '10.1111'},
            {'float_value': 10.1111, 'string_value': '10.1111'},
        ]
        expected = expected + expected  # two passes over values_to_store
        actual = list(Person.objects.exclude('id').as_pymongo())
        self.assertEqual(expected, actual)

        # How it comes out locally
        expected = [Decimal('10.0000'), Decimal('10.1000'), Decimal('10.1100'),
                    Decimal('10.1110'), Decimal('10.1111'), Decimal('10.1111')]
        expected = expected + expected
        for field_name in ['float_value', 'string_value']:
            actual = list(Person.objects().scalar(field_name))
            self.assertEqual(expected, actual)
							
								
								
									
										324
									
								
								tests/fields/test_dict_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										324
									
								
								tests/fields/test_dict_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,324 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.base import BaseDict | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase, get_as_pymongo | ||||||
|  |  | ||||||
|  |  | ||||||
class TestDictField(MongoDBTestCase):

    def test_storage(self):
        # A DictField round-trips as a plain BSON sub-document.
        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        info = {'testkey': 'testvalue'}
        post = BlogPost(info=info).save()
        self.assertEqual(
            get_as_pymongo(post),
            {
                '_id': post.id,
                'info': info
            }
        )

    def test_general_things(self):
        """Ensure that dict types work as expected."""
        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        post = BlogPost()
        # Non-dict values are rejected.
        post.info = 'my post'
        self.assertRaises(ValidationError, post.validate)

        post.info = ['test', 'test']
        self.assertRaises(ValidationError, post.validate)

        # Keys may not start with '$' (reserved by MongoDB operators),
        # even when nested...
        post.info = {'$title': 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {'nested': {'$title': 'test'}}
        self.assertRaises(ValidationError, post.validate)

        # ...may not contain '.' (MongoDB path separator)...
        post.info = {'the.title': 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {'nested': {'the.title': 'test'}}
        self.assertRaises(ValidationError, post.validate)

        # ...and must be strings.
        post.info = {1: 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {'title': 'test'}
        post.save()

        # '$' is allowed when it is not the first character of the key.
        post = BlogPost()
        post.info = {'title': 'dollar_sign', 'details': {'te$t': 'test'}}
        post.save()

        post = BlogPost()
        post.info = {'details': {'test': 'test'}}
        post.save()

        post = BlogPost()
        post.info = {'details': {'test': 3}}
        post.save()

        self.assertEqual(BlogPost.objects.count(), 4)
        self.assertEqual(
            BlogPost.objects.filter(info__title__exact='test').count(), 1)
        self.assertEqual(
            BlogPost.objects.filter(info__details__test__exact='test').count(), 1)

        post = BlogPost.objects.filter(info__title__exact='dollar_sign').first()
        self.assertIn('te$t', post['info']['details'])

        # Confirm handles non strings or non existing keys
        self.assertEqual(
            BlogPost.objects.filter(info__details__test__exact=5).count(), 0)
        self.assertEqual(
            BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0)

        # In-place dict mutations are tracked and persisted on save().
        post = BlogPost.objects.create(info={'title': 'original'})
        post.info.update({'title': 'updated'})
        post.save()
        post.reload()
        self.assertEqual('updated', post.info['title'])

        post.info.setdefault('authors', [])
        post.save()
        post.reload()
        self.assertEqual([], post.info['authors'])

    def test_dictfield_dump_document(self):
        """Ensure a DictField can handle another document's dump."""
        class Doc(Document):
            field = DictField()

        class ToEmbed(Document):
            id = IntField(primary_key=True, default=1)
            recursive = DictField()

        class ToEmbedParent(Document):
            id = IntField(primary_key=True, default=1)
            recursive = DictField()

            meta = {'allow_inheritance': True}

        class ToEmbedChild(ToEmbedParent):
            pass

        to_embed_recursive = ToEmbed(id=1).save()
        to_embed = ToEmbed(
            id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save()
        doc = Doc(field=to_embed.to_mongo().to_dict())
        doc.save()
        # The dump stays a plain dict; it is not re-hydrated into documents.
        self.assertIsInstance(doc.field, dict)
        self.assertEqual(doc.field, {'_id': 2, 'recursive': {'_id': 1, 'recursive': {}}})
        # Same thing with a Document with a _cls field
        to_embed_recursive = ToEmbedChild(id=1).save()
        to_embed_child = ToEmbedChild(
            id=2, recursive=to_embed_recursive.to_mongo().to_dict()).save()
        doc = Doc(field=to_embed_child.to_mongo().to_dict())
        doc.save()
        self.assertIsInstance(doc.field, dict)
        expected = {
            '_id': 2, '_cls': 'ToEmbedParent.ToEmbedChild',
            'recursive': {'_id': 1, '_cls': 'ToEmbedParent.ToEmbedChild', 'recursive': {}}
        }
        self.assertEqual(doc.field, expected)

    def test_dictfield_strict(self):
        """Ensure that dict field handles validation if provided a strict field type."""
        class Simple(Document):
            mapping = DictField(field=IntField())

        Simple.drop_collection()

        e = Simple()
        e.mapping['someint'] = 1
        e.save()

        # try creating an invalid mapping
        with self.assertRaises(ValidationError):
            e.mapping['somestring'] = "abc"
            e.save()

    def test_dictfield_complex(self):
        """Ensure that the dict field can handle the complex types."""
        class SettingBase(EmbeddedDocument):
            meta = {'allow_inheritance': True}

        class StringSetting(SettingBase):
            value = StringField()

        class IntegerSetting(SettingBase):
            value = IntField()

        class Simple(Document):
            mapping = DictField()

        Simple.drop_collection()

        # Mix embedded documents, scalars, nested dicts and lists.
        e = Simple()
        e.mapping['somestring'] = StringSetting(value='foo')
        e.mapping['someint'] = IntegerSetting(value=42)
        e.mapping['nested_dict'] = {'number': 1, 'string': 'Hi!',
                                    'float': 1.001,
                                    'complex': IntegerSetting(value=42),
                                    'list': [IntegerSetting(value=42),
                                             StringSetting(value='foo')]}
        e.save()

        # Embedded documents come back as their concrete subclasses.
        e2 = Simple.objects.get(id=e.id)
        self.assertIsInstance(e2.mapping['somestring'], StringSetting)
        self.assertIsInstance(e2.mapping['someint'], IntegerSetting)

        # Test querying
        self.assertEqual(
            Simple.objects.filter(mapping__someint__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__number=1).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1)

        # Confirm can update
        Simple.objects().update(
            set__mapping={"someint": IntegerSetting(value=10)})
        Simple.objects().update(
            set__mapping__nested_dict__list__1=StringSetting(value='Boo'))
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0)
        self.assertEqual(
            Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1)

    def test_push_dict(self):
        # push__ on a ListField(DictField()) appends raw dicts, including {}.
        class MyModel(Document):
            events = ListField(DictField())

        doc = MyModel(events=[{'a': 1}]).save()
        raw_doc = get_as_pymongo(doc)
        expected_raw_doc = {
            '_id': doc.id,
            'events': [{'a': 1}]
        }
        self.assertEqual(raw_doc, expected_raw_doc)

        MyModel.objects(id=doc.id).update(push__events={})
        raw_doc = get_as_pymongo(doc)
        expected_raw_doc = {
            '_id': doc.id,
            'events': [{'a': 1}, {}]
        }
        self.assertEqual(raw_doc, expected_raw_doc)

    def test_ensure_unique_default_instances(self):
        """Ensure that every field has its own unique default instance."""
        class D(Document):
            data = DictField()
            data2 = DictField(default=lambda: {})

        # Mutating one instance's default dict must not leak into others.
        d1 = D()
        d1.data['foo'] = 'bar'
        d1.data2['foo'] = 'bar'
        d2 = D()
        self.assertEqual(d2.data, {})
        self.assertEqual(d2.data2, {})

    def test_dict_field_invalid_dict_value(self):
        # None/False are accepted at assignment time but fail validate().
        class DictFieldTest(Document):
            dictionary = DictField(required=True)

        DictFieldTest.drop_collection()

        test = DictFieldTest(dictionary=None)
        test.dictionary  # Just access to test getter
        self.assertRaises(ValidationError, test.validate)

        test = DictFieldTest(dictionary=False)
        test.dictionary  # Just access to test getter
        self.assertRaises(ValidationError, test.validate)

    def test_dict_field_raises_validation_error_if_wrongly_assign_embedded_doc(self):
        # An EmbeddedDocument is not a dict and must be reported as such.
        class DictFieldTest(Document):
            dictionary = DictField(required=True)

        DictFieldTest.drop_collection()

        class Embedded(EmbeddedDocument):
            name = StringField()

        embed = Embedded(name='garbage')
        doc = DictFieldTest(dictionary=embed)
        with self.assertRaises(ValidationError) as ctx_err:
            doc.validate()
        # The error message names the offending field.
        self.assertIn("'dictionary'", str(ctx_err.exception))
        self.assertIn('Only dictionaries may be used in a DictField', str(ctx_err.exception))

    def test_atomic_update_dict_field(self):
        """Ensure that the entire DictField can be atomically updated."""
        class Simple(Document):
            mapping = DictField(field=ListField(IntField(required=True)))

        Simple.drop_collection()

        e = Simple()
        e.mapping['someints'] = [1, 2]
        e.save()
        e.update(set__mapping={"ints": [3, 4]})
        e.reload()
        # After reload the value is wrapped back in a tracking BaseDict.
        self.assertEqual(BaseDict, type(e.mapping))
        self.assertEqual({"ints": [3, 4]}, e.mapping)

        # try creating an invalid mapping
        with self.assertRaises(ValueError):
            e.update(set__mapping={"somestrings": ["foo", "bar", ]})

    def test_dictfield_with_referencefield_complex_nesting_cases(self):
        """Ensure complex nesting inside DictField handles dereferencing of ReferenceField(dbref=True | False)"""
        # Relates to Issue #1453
        class Doc(Document):
            s = StringField()

        # Every combination of DictField/ListField nesting around a
        # ReferenceField, with both dbref storage formats.
        class Simple(Document):
            mapping0 = DictField(ReferenceField(Doc, dbref=True))
            mapping1 = DictField(ReferenceField(Doc, dbref=False))
            mapping2 = DictField(ListField(ReferenceField(Doc, dbref=True)))
            mapping3 = DictField(ListField(ReferenceField(Doc, dbref=False)))
            mapping4 = DictField(DictField(field=ReferenceField(Doc, dbref=True)))
            mapping5 = DictField(DictField(field=ReferenceField(Doc, dbref=False)))
            mapping6 = DictField(ListField(DictField(ReferenceField(Doc, dbref=True))))
            mapping7 = DictField(ListField(DictField(ReferenceField(Doc, dbref=False))))
            mapping8 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=True)))))
            mapping9 = DictField(ListField(DictField(ListField(ReferenceField(Doc, dbref=False)))))

        Doc.drop_collection()
        Simple.drop_collection()

        d = Doc(s='aa').save()
        e = Simple()
        e.mapping0['someint'] = e.mapping1['someint'] = d
        e.mapping2['someint'] = e.mapping3['someint'] = [d]
        e.mapping4['someint'] = e.mapping5['someint'] = {'d': d}
        e.mapping6['someint'] = e.mapping7['someint'] = [{'d': d}]
        e.mapping8['someint'] = e.mapping9['someint'] = [{'d': [d]}]
        e.save()

        # All references must dereference back to Doc instances.
        s = Simple.objects.first()
        self.assertIsInstance(s.mapping0['someint'], Doc)
        self.assertIsInstance(s.mapping1['someint'], Doc)
        self.assertIsInstance(s.mapping2['someint'][0], Doc)
        self.assertIsInstance(s.mapping3['someint'][0], Doc)
        self.assertIsInstance(s.mapping4['someint']['d'], Doc)
        self.assertIsInstance(s.mapping5['someint']['d'], Doc)
        self.assertIsInstance(s.mapping6['someint'][0]['d'], Doc)
        self.assertIsInstance(s.mapping7['someint'][0]['d'], Doc)
        self.assertIsInstance(s.mapping8['someint'][0]['d'][0], Doc)
        self.assertIsInstance(s.mapping9['someint'][0]['d'][0], Doc)
							
								
								
									
										130
									
								
								tests/fields/test_email_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										130
									
								
								tests/fields/test_email_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,130 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | import sys | ||||||
|  | from unittest import SkipTest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestEmailField(MongoDBTestCase):
    def test_generic_behavior(self):
        """Common valid and invalid addresses with the default options."""
        class User(Document):
            email = EmailField()

        valid_addresses = [
            'ross@example.com',
            'ross@example.co.uk',
            # long local part + long domain
            ('Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5S'
             'aJIazqqWkm7.net'),
            'new-tld@example.technology',
            # unicode domain
            u'user@пример.рф',
        ]
        for address in valid_addresses:
            User(email=address).validate()

        invalid_addresses = [
            'ross@example.com.',  # trailing dot
            u'user@пример',       # invalid unicode domain
            123,                  # invalid data type
        ]
        for address in invalid_addresses:
            with self.assertRaises(ValidationError):
                User(email=address).validate()

    def test_email_field_unicode_user(self):
        # Don't run this test on pypy3, which doesn't support unicode regex:
        # https://bitbucket.org/pypy/pypy/issues/1821/regular-expression-doesnt-find-unicode
        if sys.version_info[:2] == (3, 2):
            raise SkipTest('unicode email addresses are not supported on PyPy 3')

        class User(Document):
            email = EmailField()

        # unicode user shouldn't validate by default...
        with self.assertRaises(ValidationError):
            User(email=u'Dörte@Sörensen.example.com').validate()

        # ...but it should be fine with allow_utf8_user set to True
        class User(Document):
            email = EmailField(allow_utf8_user=True)

        User(email=u'Dörte@Sörensen.example.com').validate()

    def test_email_field_domain_whitelist(self):
        class User(Document):
            email = EmailField()

        # localhost domain shouldn't validate by default...
        with self.assertRaises(ValidationError):
            User(email='me@localhost').validate()

        # ...but it should be fine if it's whitelisted
        class User(Document):
            email = EmailField(domain_whitelist=['localhost'])

        User(email='me@localhost').validate()

    def test_email_domain_validation_fails_if_invalid_idn(self):
        """A domain that cannot be IDN-encoded is reported explicitly."""
        class User(Document):
            email = EmailField()

        invalid_idn = '.google.com'
        with self.assertRaises(ValidationError) as ctx:
            User(email='me@%s' % invalid_idn).validate()
        self.assertIn("domain failed IDN encoding", str(ctx.exception))

    def test_email_field_ip_domain(self):
        class User(Document):
            email = EmailField()

        valid_ipv4 = 'email@[127.0.0.1]'
        valid_ipv6 = 'email@[2001:dB8::1]'
        invalid_ip = 'email@[324.0.0.1]'

        # IP address as a domain shouldn't validate by default...
        for address in (valid_ipv4, valid_ipv6, invalid_ip):
            with self.assertRaises(ValidationError):
                User(email=address).validate()

        # ...but it should be fine with allow_ip_domain set to True
        class User(Document):
            email = EmailField(allow_ip_domain=True)

        User(email=valid_ipv4).validate()
        User(email=valid_ipv6).validate()

        # invalid IP should still fail validation
        with self.assertRaises(ValidationError):
            User(email=invalid_ip).validate()

    def test_email_field_honors_regex(self):
        class User(Document):
            email = EmailField(regex=r'\w+@example.com')

        # Fails regex validation
        with self.assertRaises(ValidationError):
            User(email='me@foo.com').validate()

        # Passes regex validation
        self.assertIsNone(User(email='me@example.com').validate())
							
								
								
									
										344
									
								
								tests/fields/test_embedded_document_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										344
									
								
								tests/fields/test_embedded_document_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,344 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | from mongoengine import Document, StringField, ValidationError, EmbeddedDocument, EmbeddedDocumentField, \ | ||||||
|  |     InvalidQueryError, LookUpError, IntField, GenericEmbeddedDocumentField, ListField, EmbeddedDocumentListField, \ | ||||||
|  |     ReferenceField | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestEmbeddedDocumentField(MongoDBTestCase): | ||||||
|  |     def test___init___(self): | ||||||
|  |         class MyDoc(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         field = EmbeddedDocumentField(MyDoc) | ||||||
|  |         self.assertEqual(field.document_type_obj, MyDoc) | ||||||
|  |  | ||||||
|  |         field2 = EmbeddedDocumentField('MyDoc') | ||||||
|  |         self.assertEqual(field2.document_type_obj, 'MyDoc') | ||||||
|  |  | ||||||
|  |     def test___init___throw_error_if_document_type_is_not_EmbeddedDocument(self): | ||||||
|  |         with self.assertRaises(ValidationError): | ||||||
|  |             EmbeddedDocumentField(dict) | ||||||
|  |  | ||||||
|  |     def test_document_type_throw_error_if_not_EmbeddedDocument_subclass(self): | ||||||
|  |  | ||||||
|  |         class MyDoc(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         emb = EmbeddedDocumentField('MyDoc') | ||||||
|  |         with self.assertRaises(ValidationError) as ctx: | ||||||
|  |             emb.document_type | ||||||
|  |         self.assertIn('Invalid embedded document class provided to an EmbeddedDocumentField', str(ctx.exception)) | ||||||
|  |  | ||||||
    def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self):
        # Relates to #1661
        # Declaring a Document with an EmbeddedDocumentField pointing at a
        # (non-embedded) Document must fail at class-definition time, both
        # when the target is passed as a class and as a lazy string name.
        class MyDoc(Document):
            name = StringField()

        with self.assertRaises(ValidationError):
            class MyFailingDoc(Document):
                emb = EmbeddedDocumentField(MyDoc)

        with self.assertRaises(ValidationError):
            class MyFailingdoc2(Document):
                emb = EmbeddedDocumentField('MyDoc')
|  |  | ||||||
|  |     def test_query_embedded_document_attribute(self): | ||||||
|  |         class AdminSettings(EmbeddedDocument): | ||||||
|  |             foo1 = StringField() | ||||||
|  |             foo2 = StringField() | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             settings = EmbeddedDocumentField(AdminSettings) | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         p = Person( | ||||||
|  |             settings=AdminSettings(foo1='bar1', foo2='bar2'), | ||||||
|  |             name='John', | ||||||
|  |         ).save() | ||||||
|  |  | ||||||
|  |         # Test non exiting attribute | ||||||
|  |         with self.assertRaises(InvalidQueryError) as ctx_err: | ||||||
|  |             Person.objects(settings__notexist='bar').first() | ||||||
|  |         self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') | ||||||
|  |  | ||||||
|  |         with self.assertRaises(LookUpError): | ||||||
|  |             Person.objects.only('settings.notexist') | ||||||
|  |  | ||||||
|  |         # Test existing attribute | ||||||
|  |         self.assertEqual(Person.objects(settings__foo1='bar1').first().id, p.id) | ||||||
|  |         only_p = Person.objects.only('settings.foo1').first() | ||||||
|  |         self.assertEqual(only_p.settings.foo1, p.settings.foo1) | ||||||
|  |         self.assertIsNone(only_p.settings.foo2) | ||||||
|  |         self.assertIsNone(only_p.name) | ||||||
|  |  | ||||||
|  |         exclude_p = Person.objects.exclude('settings.foo1').first() | ||||||
|  |         self.assertIsNone(exclude_p.settings.foo1) | ||||||
|  |         self.assertEqual(exclude_p.settings.foo2, p.settings.foo2) | ||||||
|  |         self.assertEqual(exclude_p.name, p.name) | ||||||
|  |  | ||||||
|  |     def test_query_embedded_document_attribute_with_inheritance(self): | ||||||
|  |         class BaseSettings(EmbeddedDocument): | ||||||
|  |             meta = {'allow_inheritance': True} | ||||||
|  |             base_foo = StringField() | ||||||
|  |  | ||||||
|  |         class AdminSettings(BaseSettings): | ||||||
|  |             sub_foo = StringField() | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             settings = EmbeddedDocumentField(BaseSettings) | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         p = Person(settings=AdminSettings(base_foo='basefoo', sub_foo='subfoo')) | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         # Test non exiting attribute | ||||||
|  |         with self.assertRaises(InvalidQueryError) as ctx_err: | ||||||
|  |             self.assertEqual(Person.objects(settings__notexist='bar').first().id, p.id) | ||||||
|  |         self.assertEqual(unicode(ctx_err.exception), u'Cannot resolve field "notexist"') | ||||||
|  |  | ||||||
|  |         # Test existing attribute | ||||||
|  |         self.assertEqual(Person.objects(settings__base_foo='basefoo').first().id, p.id) | ||||||
|  |         self.assertEqual(Person.objects(settings__sub_foo='subfoo').first().id, p.id) | ||||||
|  |  | ||||||
|  |         only_p = Person.objects.only('settings.base_foo', 'settings._cls').first() | ||||||
|  |         self.assertEqual(only_p.settings.base_foo, 'basefoo') | ||||||
|  |         self.assertIsNone(only_p.settings.sub_foo) | ||||||
|  |  | ||||||
|  |     def test_query_list_embedded_document_with_inheritance(self): | ||||||
|  |         class Post(EmbeddedDocument): | ||||||
|  |             title = StringField(max_length=120, required=True) | ||||||
|  |             meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|  |         class TextPost(Post): | ||||||
|  |             content = StringField() | ||||||
|  |  | ||||||
|  |         class MoviePost(Post): | ||||||
|  |             author = StringField() | ||||||
|  |  | ||||||
|  |         class Record(Document): | ||||||
|  |             posts = ListField(EmbeddedDocumentField(Post)) | ||||||
|  |  | ||||||
|  |         record_movie = Record(posts=[MoviePost(author='John', title='foo')]).save() | ||||||
|  |         record_text = Record(posts=[TextPost(content='a', title='foo')]).save() | ||||||
|  |  | ||||||
|  |         records = list(Record.objects(posts__author=record_movie.posts[0].author)) | ||||||
|  |         self.assertEqual(len(records), 1) | ||||||
|  |         self.assertEqual(records[0].id, record_movie.id) | ||||||
|  |  | ||||||
|  |         records = list(Record.objects(posts__content=record_text.posts[0].content)) | ||||||
|  |         self.assertEqual(len(records), 1) | ||||||
|  |         self.assertEqual(records[0].id, record_text.id) | ||||||
|  |  | ||||||
|  |         self.assertEqual(Record.objects(posts__title='foo').count(), 2) | ||||||
|  |  | ||||||
|  |  | ||||||
class TestGenericEmbeddedDocumentField(MongoDBTestCase):
    """Tests for GenericEmbeddedDocumentField: storage of arbitrary
    embedded document types, `choices` restriction, and query resolution.
    """

    def test_generic_embedded_document(self):
        """A GenericEmbeddedDocumentField accepts any EmbeddedDocument type."""
        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            like = GenericEmbeddedDocumentField()

        Person.drop_collection()

        person = Person(name='Test User')
        person.like = Car(name='Fiat')
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.like, Car)

        # The same field can later hold a different embedded type.
        person.like = Dish(food="arroz", number=15)
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.like, Dish)

    def test_generic_embedded_document_choices(self):
        """Ensure you can limit GenericEmbeddedDocument choices."""
        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            like = GenericEmbeddedDocumentField(choices=(Dish,))

        Person.drop_collection()

        person = Person(name='Test User')
        person.like = Car(name='Fiat')
        self.assertRaises(ValidationError, person.validate)

        person.like = Dish(food="arroz", number=15)
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.like, Dish)

    def test_generic_list_embedded_document_choices(self):
        """Ensure you can limit GenericEmbeddedDocument choices inside
        a list field.
        """
        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            likes = ListField(GenericEmbeddedDocumentField(choices=(Dish,)))

        Person.drop_collection()

        person = Person(name='Test User')
        person.likes = [Car(name='Fiat')]
        self.assertRaises(ValidationError, person.validate)

        person.likes = [Dish(food="arroz", number=15)]
        person.save()

        person = Person.objects.first()
        self.assertIsInstance(person.likes[0], Dish)

    def test_choices_validation_documents(self):
        """
        Ensure fields with document choices validate given a valid choice.
        """
        class UserComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class BlogPost(Document):
            comments = ListField(
                GenericEmbeddedDocumentField(choices=(UserComments,))
            )

        # Keep the test isolated from documents left by previous runs.
        BlogPost.drop_collection()

        # Ensure Validation Passes
        BlogPost(comments=[
            UserComments(author='user2', message='message2'),
        ]).save()

    def test_choices_validation_documents_invalid(self):
        """
        Ensure fields with document choices validate given an invalid choice.
        This should throw a ValidationError exception.
        """
        class UserComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class ModeratorComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class BlogPost(Document):
            comments = ListField(
                GenericEmbeddedDocumentField(choices=(UserComments,))
            )

        # Keep the test isolated from documents left by previous runs.
        BlogPost.drop_collection()

        # Single Entry Failure
        post = BlogPost(comments=[
            ModeratorComments(author='mod1', message='message1'),
        ])
        self.assertRaises(ValidationError, post.save)

        # Mixed Entry Failure
        post = BlogPost(comments=[
            ModeratorComments(author='mod1', message='message1'),
            UserComments(author='user2', message='message2'),
        ])
        self.assertRaises(ValidationError, post.save)

    def test_choices_validation_documents_inheritance(self):
        """
        Ensure fields with document choices validate given subclass of choice.
        """
        class Comments(EmbeddedDocument):
            meta = {
                'abstract': True
            }
            author = StringField()
            message = StringField()

        class UserComments(Comments):
            pass

        class BlogPost(Document):
            comments = ListField(
                GenericEmbeddedDocumentField(choices=(Comments,))
            )

        # Keep the test isolated from documents left by previous runs.
        BlogPost.drop_collection()

        # Save Valid EmbeddedDocument Type
        BlogPost(comments=[
            UserComments(author='user2', message='message2'),
        ]).save()

    def test_query_generic_embedded_document_attribute(self):
        """Attributes of any of the `choices` types resolve in queries;
        unknown attributes raise.
        """
        class AdminSettings(EmbeddedDocument):
            foo1 = StringField()

        class NonAdminSettings(EmbeddedDocument):
            foo2 = StringField()

        class Person(Document):
            settings = GenericEmbeddedDocumentField(choices=(AdminSettings, NonAdminSettings))

        Person.drop_collection()

        p1 = Person(settings=AdminSettings(foo1='bar1')).save()
        p2 = Person(settings=NonAdminSettings(foo2='bar2')).save()

        # Test non existing attribute
        with self.assertRaises(InvalidQueryError) as ctx_err:
            Person.objects(settings__notexist='bar').first()
        # `str` instead of the Python 2-only `unicode` builtin so the test
        # also runs on Python 3 (the message is plain ASCII either way).
        self.assertEqual(str(ctx_err.exception), u'Cannot resolve field "notexist"')

        with self.assertRaises(LookUpError):
            Person.objects.only('settings.notexist')

        # Test existing attribute
        self.assertEqual(Person.objects(settings__foo1='bar1').first().id, p1.id)
        self.assertEqual(Person.objects(settings__foo2='bar2').first().id, p2.id)

    def test_query_generic_embedded_document_attribute_with_inheritance(self):
        """Subclass attributes resolve when `choices` names the base class."""
        class BaseSettings(EmbeddedDocument):
            meta = {'allow_inheritance': True}
            base_foo = StringField()

        class AdminSettings(BaseSettings):
            sub_foo = StringField()

        class Person(Document):
            settings = GenericEmbeddedDocumentField(choices=[BaseSettings])

        Person.drop_collection()

        p = Person(settings=AdminSettings(base_foo='basefoo', sub_foo='subfoo'))
        p.save()

        # Test non existing attribute
        with self.assertRaises(InvalidQueryError) as ctx_err:
            self.assertEqual(Person.objects(settings__notexist='bar').first().id, p.id)
        # `str` instead of the Python 2-only `unicode` builtin so the test
        # also runs on Python 3 (the message is plain ASCII either way).
        self.assertEqual(str(ctx_err.exception), u'Cannot resolve field "notexist"')

        # Test existing attribute
        self.assertEqual(Person.objects(settings__base_foo='basefoo').first().id, p.id)
        self.assertEqual(Person.objects(settings__sub_foo='subfoo').first().id, p.id)
							
								
								
									
										58
									
								
								tests/fields/test_float_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										58
									
								
								tests/fields/test_float_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,58 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | import six | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestFloatField(MongoDBTestCase):
    """Tests for FloatField: `ne` query semantics and value validation."""

    def test_float_ne_operator(self):
        """The `ne` operator must distinguish None from stored numbers."""
        class TestDocument(Document):
            float_fld = FloatField()

        TestDocument.drop_collection()

        for stored in (None, 1):
            TestDocument(float_fld=stored).save()

        self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count())
        self.assertEqual(1, TestDocument.objects(float_fld__ne=1).count())

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to float fields.
        """
        class Person(Document):
            height = FloatField(min_value=0.1, max_value=3.5)

        class BigPerson(Document):
            height = FloatField()

        person = Person(height=1.89)
        person.validate()

        # Strings and out-of-range values must all be rejected.
        for bad_height in ('2.0', 0.01, 4.0):
            person.height = bad_height
            with self.assertRaises(ValidationError):
                person.validate()

        with self.assertRaises(ValidationError):
            Person(height='something invalid').validate()

        big_person = BigPerson()

        # Every integer type coerces cleanly when there are no bounds.
        for value, value_type in enumerate(six.integer_types):
            big_person.height = value_type(value)
            big_person.validate()

        big_person.height = 2 ** 500
        big_person.validate()

        # Too big to be represented as a float value.
        big_person.height = 2 ** 100000
        with self.assertRaises(ValidationError):
            big_person.validate()
							
								
								
									
										42
									
								
								tests/fields/test_int_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										42
									
								
								tests/fields/test_int_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,42 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestIntField(MongoDBTestCase):
    """Tests for IntField: value validation and `ne` query semantics."""

    def test_int_validation(self):
        """Ensure that invalid values cannot be assigned to int fields.
        """
        class Person(Document):
            age = IntField(min_value=0, max_value=110)

        person = Person()

        # Values on and between the bounds validate cleanly.
        for valid_age in (0, 50, 110):
            person.age = valid_age
            person.validate()

        # Out-of-range and non-numeric values are rejected.
        for invalid_age in (-1, 120, 'ten'):
            person.age = invalid_age
            with self.assertRaises(ValidationError):
                person.validate()

    def test_ne_operator(self):
        """The `ne` operator must distinguish None from stored integers."""
        class TestDocument(Document):
            int_fld = IntField()

        TestDocument.drop_collection()

        for stored in (None, 1):
            TestDocument(int_fld=stored).save()

        self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count())
        self.assertEqual(1, TestDocument.objects(int_fld__ne=1).count())
							
								
								
									
										570
									
								
								tests/fields/test_lazy_reference_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										570
									
								
								tests/fields/test_lazy_reference_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,570 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | from bson import DBRef, ObjectId | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.base import LazyReference | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestLazyReferenceField(MongoDBTestCase):
    """Tests for LazyReferenceField: configuration, fetching/caching,
    assignment coercion, querying, passthrough and embedded usage.
    """

    def test_lazy_reference_config(self):
        """The field rejects non-Document targets at declaration time."""
        # Make sure ReferenceField only accepts a document class or a string
        # with a document class name.
        self.assertRaises(ValidationError, LazyReferenceField, EmbeddedDocument)

    def test___repr__(self):
        """repr() of an unfetched reference mentions LazyReference."""
        class Animal(Document):
            pass

        class Ocurrence(Document):
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal()
        oc = Ocurrence(animal=animal)
        self.assertIn('LazyReference', repr(oc.animal))

    def test___getattr___unknown_attr_raises_attribute_error(self):
        """Unknown attribute access on the reference raises AttributeError."""
        class Animal(Document):
            pass

        class Ocurrence(Document):
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal().save()
        oc = Ocurrence(animal=animal)
        with self.assertRaises(AttributeError):
            oc.animal.not_exist

    def test_lazy_reference_simple(self):
        """fetch() resolves the reference and caches it unless force=True."""
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        fetched_animal = p.animal.fetch()
        self.assertEqual(fetched_animal, animal)
        # `fetch` keep cache on referenced document by default...
        animal.tag = "not so heavy"
        animal.save()
        double_fetch = p.animal.fetch()
        self.assertIs(fetched_animal, double_fetch)
        self.assertEqual(double_fetch.tag, "heavy")
        # ...unless specified otherwise
        fetch_force = p.animal.fetch(force=True)
        self.assertIsNot(fetch_force, fetched_animal)
        self.assertEqual(fetch_force.tag, "not so heavy")

    def test_lazy_reference_fetch_invalid_ref(self):
        """fetch() raises DoesNotExist when the target was deleted."""
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        animal.delete()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        with self.assertRaises(DoesNotExist):
            p.animal.fetch()

    def test_lazy_reference_set(self):
        """The field accepts documents, pks, DBRefs and LazyReferences,
        for both the declared class and its subclasses.
        """
        class Animal(Document):
            meta = {'allow_inheritance': True}

            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class SubAnimal(Animal):
            nick = StringField()

        animal = Animal(name="Leopard", tag="heavy").save()
        sub_animal = SubAnimal(nick='doggo', name='dog').save()
        for ref in (
                animal,
                animal.pk,
                DBRef(animal._get_collection_name(), animal.pk),
                LazyReference(Animal, animal.pk),

                sub_animal,
                sub_animal.pk,
                DBRef(sub_animal._get_collection_name(), sub_animal.pk),
                LazyReference(SubAnimal, sub_animal.pk),
                ):
            p = Ocurrence(person="test", animal=ref).save()
            p.reload()
            self.assertIsInstance(p.animal, LazyReference)
            p.animal.fetch()

    def test_lazy_reference_bad_set(self):
        """Values that cannot be coerced to a valid reference fail validation."""
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class BadDoc(Document):
            pass

        animal = Animal(name="Leopard", tag="heavy").save()
        baddoc = BadDoc().save()
        for bad in (
                42,
                'foo',
                baddoc,
                DBRef(baddoc._get_collection_name(), animal.pk),
                LazyReference(BadDoc, animal.pk)
                ):
            with self.assertRaises(ValidationError):
                # No need to keep the result -- save() must raise.
                Ocurrence(person="test", animal=bad).save()

    def test_lazy_reference_query_conversion(self):
        """Ensure that LazyReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """
        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = LazyReferenceField(Member, dbref=False)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title='post 1', author=m1)
        post1.save()

        post2 = BlogPost(title='post 2', author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        self.assertEqual(post.id, post2.id)

    def test_lazy_reference_query_conversion_dbref(self):
        """Ensure that LazyReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """
        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = LazyReferenceField(Member, dbref=True)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title='post 1', author=m1)
        post1.save()

        post2 = BlogPost(title='post 2', author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        self.assertEqual(post.id, post2.id)

    def test_lazy_reference_passthrough(self):
        """passthrough=True exposes the referenced document's fields;
        passthrough=False only exposes the pk.
        """
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            animal = LazyReferenceField(Animal, passthrough=False)
            animal_passthrough = LazyReferenceField(Animal, passthrough=True)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(animal=animal, animal_passthrough=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        with self.assertRaises(KeyError):
            p.animal['name']
        with self.assertRaises(AttributeError):
            p.animal.name
        self.assertEqual(p.animal.pk, animal.pk)

        self.assertEqual(p.animal_passthrough.name, "Leopard")
        self.assertEqual(p.animal_passthrough['name'], "Leopard")

        # Should not be able to access referenced document's methods
        with self.assertRaises(AttributeError):
            p.animal.save
        with self.assertRaises(KeyError):
            p.animal['save']

    def test_lazy_reference_not_set(self):
        """An unset LazyReferenceField reads back as None."""
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        Ocurrence(person='foo').save()
        p = Ocurrence.objects.get()
        self.assertIs(p.animal, None)

    def test_lazy_reference_equality(self):
        """A LazyReference compares equal to its target document (both ways)."""
        class Animal(Document):
            name = StringField()
            tag = StringField()

        Animal.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        animalref = LazyReference(Animal, animal.pk)
        self.assertEqual(animal, animalref)
        self.assertEqual(animalref, animal)

        other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90"))
        self.assertNotEqual(animal, other_animalref)
        self.assertNotEqual(other_animalref, animal)

    def test_lazy_reference_embedded(self):
        """LazyReferenceFields coerce to LazyReference everywhere: direct,
        in lists, and nested inside embedded documents.
        """
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class EmbeddedOcurrence(EmbeddedDocument):
            in_list = ListField(LazyReferenceField(Animal))
            direct = LazyReferenceField(Animal)

        class Ocurrence(Document):
            in_list = ListField(LazyReferenceField(Animal))
            in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
            direct = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        # Use explicit keyword args instead of the deprecated positional
        # Document construction.
        animal1 = Animal(name='doggo').save()
        animal2 = Animal(name='cheeta').save()

        def check_fields_type(occ):
            self.assertIsInstance(occ.direct, LazyReference)
            for elem in occ.in_list:
                self.assertIsInstance(elem, LazyReference)
            self.assertIsInstance(occ.in_embedded.direct, LazyReference)
            for elem in occ.in_embedded.in_list:
                self.assertIsInstance(elem, LazyReference)

        occ = Ocurrence(
            in_list=[animal1, animal2],
            in_embedded={'in_list': [animal1, animal2], 'direct': animal1},
            direct=animal1
        ).save()
        check_fields_type(occ)
        occ.reload()
        check_fields_type(occ)
        # Raw pks must be coerced back to LazyReference on assignment too.
        occ.direct = animal1.id
        occ.in_list = [animal1.id, animal2.id]
        occ.in_embedded.direct = animal1.id
        occ.in_embedded.in_list = [animal1.id, animal2.id]
        check_fields_type(occ)
|  |  | ||||||
|  |  | ||||||
class TestGenericLazyReferenceField(MongoDBTestCase):
    """Tests for GenericLazyReferenceField: lazy fetch caching, `choices`
    validation, accepted assignment formats, and query conversion."""

    def test_generic_lazy_reference_simple(self):
        """fetch() caches the referenced document unless force=True."""
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        p = Ocurrence.objects.get()
        self.assertIsInstance(p.animal, LazyReference)
        fetched_animal = p.animal.fetch()
        self.assertEqual(fetched_animal, animal)
        # `fetch` keeps a cache of the referenced document by default...
        animal.tag = "not so heavy"
        animal.save()
        double_fetch = p.animal.fetch()
        self.assertIs(fetched_animal, double_fetch)
        self.assertEqual(double_fetch.tag, "heavy")
        # ...unless specified otherwise
        fetch_force = p.animal.fetch(force=True)
        self.assertIsNot(fetch_force, fetched_animal)
        self.assertEqual(fetch_force.tag, "not so heavy")

    def test_generic_lazy_reference_choices(self):
        """Only documents of a class listed in `choices` may be referenced."""
        class Animal(Document):
            name = StringField()

        class Vegetal(Document):
            name = StringField()

        class Mineral(Document):
            name = StringField()

        class Ocurrence(Document):
            living_thing = GenericLazyReferenceField(choices=[Animal, Vegetal])
            thing = GenericLazyReferenceField()

        Animal.drop_collection()
        Vegetal.drop_collection()
        Mineral.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard").save()
        vegetal = Vegetal(name="Oak").save()
        mineral = Mineral(name="Granite").save()

        occ_animal = Ocurrence(living_thing=animal, thing=animal).save()
        occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save()
        with self.assertRaises(ValidationError):
            Ocurrence(living_thing=mineral).save()

        occ = Ocurrence.objects.get(living_thing=animal)
        self.assertEqual(occ, occ_animal)
        self.assertIsInstance(occ.thing, LazyReference)
        self.assertIsInstance(occ.living_thing, LazyReference)

        occ.thing = vegetal
        occ.living_thing = vegetal
        occ.save()

        # Mineral is fine for the unconstrained field but not for the
        # constrained one, so this save must fail on `living_thing`.
        occ.thing = mineral
        occ.living_thing = mineral
        with self.assertRaises(ValidationError):
            occ.save()

    def test_generic_lazy_reference_set(self):
        """All supported reference representations (document instance,
        LazyReference, raw {_cls, _ref} dict) are accepted, including for
        subclasses of the referenced class."""
        class Animal(Document):
            meta = {'allow_inheritance': True}

            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class SubAnimal(Animal):
            nick = StringField()

        animal = Animal(name="Leopard", tag="heavy").save()
        sub_animal = SubAnimal(nick='doggo', name='dog').save()
        for ref in (
                animal,
                LazyReference(Animal, animal.pk),
                {'_cls': 'Animal', '_ref': DBRef(animal._get_collection_name(), animal.pk)},

                sub_animal,
                LazyReference(SubAnimal, sub_animal.pk),
                {'_cls': 'SubAnimal', '_ref': DBRef(sub_animal._get_collection_name(), sub_animal.pk)},
                ):
            p = Ocurrence(person="test", animal=ref).save()
            p.reload()
            self.assertIsInstance(p.animal, (LazyReference, Document))
            p.animal.fetch()

    def test_generic_lazy_reference_bad_set(self):
        """Values that are not valid references, or that reference a class
        not listed in `choices`, are rejected at save time."""
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField(choices=['Animal'])

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class BadDoc(Document):
            pass

        animal = Animal(name="Leopard", tag="heavy").save()
        baddoc = BadDoc().save()
        for bad in (
                42,
                'foo',
                baddoc,
                LazyReference(BadDoc, animal.pk)
                ):
            with self.assertRaises(ValidationError):
                # save() is expected to raise; its result is deliberately
                # discarded (a previously unused `p =` binding was dropped).
                Ocurrence(person="test", animal=bad).save()

    def test_generic_lazy_reference_query_conversion(self):
        """Generic lazy references can be queried by document instance or by
        an explicit LazyReference."""
        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = GenericLazyReferenceField()

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title='post 1', author=m1)
        post1.save()

        post2 = BlogPost(title='post 2', author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        self.assertEqual(post.id, post2.id)

    def test_generic_lazy_reference_not_set(self):
        """An unset GenericLazyReferenceField reads back as None."""
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        Ocurrence(person='foo').save()
        p = Ocurrence.objects.get()
        self.assertIs(p.animal, None)

    def test_generic_lazy_reference_accepts_string_instead_of_class(self):
        """The referenced class may be given by name rather than by class."""
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField('Animal')

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal().save()
        Ocurrence(animal=animal).save()
        p = Ocurrence.objects.get()
        self.assertEqual(p.animal, animal)

    def test_generic_lazy_reference_embedded(self):
        """Generic lazy references keep their LazyReference type at any
        nesting level, including after re-assignment from raw dicts."""
        class Animal(Document):
            name = StringField()
            tag = StringField()

        class EmbeddedOcurrence(EmbeddedDocument):
            in_list = ListField(GenericLazyReferenceField())
            direct = GenericLazyReferenceField()

        class Ocurrence(Document):
            in_list = ListField(GenericLazyReferenceField())
            in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
            direct = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal1 = Animal('doggo').save()
        animal2 = Animal('cheeta').save()

        def check_fields_type(occ):
            self.assertIsInstance(occ.direct, LazyReference)
            for elem in occ.in_list:
                self.assertIsInstance(elem, LazyReference)
            self.assertIsInstance(occ.in_embedded.direct, LazyReference)
            for elem in occ.in_embedded.in_list:
                self.assertIsInstance(elem, LazyReference)

        occ = Ocurrence(
            in_list=[animal1, animal2],
            in_embedded={'in_list': [animal1, animal2], 'direct': animal1},
            direct=animal1
        ).save()
        check_fields_type(occ)
        occ.reload()
        check_fields_type(occ)
        animal1_ref = {'_cls': 'Animal', '_ref': DBRef(animal1._get_collection_name(), animal1.pk)}
        animal2_ref = {'_cls': 'Animal', '_ref': DBRef(animal2._get_collection_name(), animal2.pk)}
        occ.direct = animal1_ref
        occ.in_list = [animal1_ref, animal2_ref]
        occ.in_embedded.direct = animal1_ref
        occ.in_embedded.in_list = [animal1_ref, animal2_ref]
        check_fields_type(occ)
							
								
								
									
										56
									
								
								tests/fields/test_long_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										56
									
								
								tests/fields/test_long_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,56 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | import six | ||||||
|  |  | ||||||
|  | try: | ||||||
|  |     from bson.int64 import Int64 | ||||||
|  | except ImportError: | ||||||
|  |     Int64 = long | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.connection import get_db | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestLongField(MongoDBTestCase):
    """Behavioural tests for LongField: storage type, validation, queries."""

    def test_long_field_is_considered_as_int64(self):
        """A LongField value is persisted as a BSON Int64 even when it is
        small enough to fit in a plain 32-bit int."""
        class TestLongFieldConsideredAsInt64(Document):
            some_long = LongField()

        saved = TestLongFieldConsideredAsInt64(some_long=42).save()
        raw_doc = get_db().test_long_field_considered_as_int64.find()[0]
        self.assertIsInstance(raw_doc['some_long'], Int64)
        self.assertIsInstance(saved.some_long, six.integer_types)

    def test_long_validation(self):
        """Ensure that invalid values cannot be assigned to long fields.
        """
        class TestDocument(Document):
            value = LongField(min_value=0, max_value=110)

        doc = TestDocument()
        doc.value = 50
        doc.validate()  # an in-range value must pass

        # Below min, above max, and non-numeric values must all be rejected.
        for bad_value in (-1, 120, 'ten'):
            doc.value = bad_value
            self.assertRaises(ValidationError, doc.validate)

    def test_long_ne_operator(self):
        """__ne=None matches only documents whose field is actually set."""
        class TestDocument(Document):
            long_fld = LongField()

        TestDocument.drop_collection()

        TestDocument(long_fld=None).save()
        TestDocument(long_fld=1).save()

        self.assertEqual(1, TestDocument.objects(long_fld__ne=None).count())
							
								
								
									
										144
									
								
								tests/fields/test_map_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										144
									
								
								tests/fields/test_map_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,144 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | import datetime | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestMapField(MongoDBTestCase):
    """Behavioural tests for MapField (a dict field whose values all share
    one declared field type)."""

    def test_mapfield(self):
        """Ensure that the MapField handles the declared type."""
        class Simple(Document):
            mapping = MapField(IntField())

        Simple.drop_collection()

        e = Simple()
        e.mapping['someint'] = 1
        e.save()

        # Value-type validation happens on save(), not on assignment.
        with self.assertRaises(ValidationError):
            e.mapping['somestring'] = "abc"
            e.save()

        # Declaring a MapField without a value type is itself invalid.
        with self.assertRaises(ValidationError):
            class NoDeclaredType(Document):
                mapping = MapField()

    def test_complex_mapfield(self):
        """Ensure that the MapField can handle complex declared types."""

        class SettingBase(EmbeddedDocument):
            meta = {"allow_inheritance": True}

        class StringSetting(SettingBase):
            value = StringField()

        class IntegerSetting(SettingBase):
            value = IntField()

        class Extensible(Document):
            mapping = MapField(EmbeddedDocumentField(SettingBase))

        Extensible.drop_collection()

        e = Extensible()
        e.mapping['somestring'] = StringSetting(value='foo')
        e.mapping['someint'] = IntegerSetting(value=42)
        e.save()

        # The concrete subclass of each value must survive the round-trip.
        e2 = Extensible.objects.get(id=e.id)
        self.assertIsInstance(e2.mapping['somestring'], StringSetting)
        self.assertIsInstance(e2.mapping['someint'], IntegerSetting)

        # A raw int is not an embedded SettingBase, so save() must fail.
        with self.assertRaises(ValidationError):
            e.mapping['someint'] = 123
            e.save()

    def test_embedded_mapfield_db_field(self):
        # Both the map itself and the embedded field use custom db_field
        # names; the raw stored document must use those names.
        class Embedded(EmbeddedDocument):
            number = IntField(default=0, db_field='i')

        class Test(Document):
            my_map = MapField(field=EmbeddedDocumentField(Embedded),
                              db_field='x')

        Test.drop_collection()

        test = Test()
        test.my_map['DICTIONARY_KEY'] = Embedded(number=1)
        test.save()

        # Atomic updates must translate python names into db_field names.
        Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1)

        test = Test.objects.get()
        self.assertEqual(test.my_map['DICTIONARY_KEY'].number, 2)
        doc = self.db.test.find_one()
        self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2)

    def test_mapfield_numerical_index(self):
        """Ensure that MapField accept numeric strings as indexes."""

        class Embedded(EmbeddedDocument):
            name = StringField()

        class Test(Document):
            my_map = MapField(EmbeddedDocumentField(Embedded))

        Test.drop_collection()

        test = Test()
        test.my_map['1'] = Embedded(name='test')
        test.save()
        # Mutating through a numeric-string key and re-saving must not raise.
        test.my_map['1'].name = 'test updated'
        test.save()

    def test_map_field_lookup(self):
        """Ensure MapField lookups succeed on Fields without a lookup
        method.
        """

        class Action(EmbeddedDocument):
            operation = StringField()
            object = StringField()

        class Log(Document):
            name = StringField()
            visited = MapField(DateTimeField())
            actions = MapField(EmbeddedDocumentField(Action))

        Log.drop_collection()
        Log(name="wilson", visited={'friends': datetime.datetime.now()},
            actions={'friends': Action(operation='drink', object='beer')}).save()

        # Query through a map key with both a simple field (DateTimeField)
        # and an embedded-document value.
        self.assertEqual(1, Log.objects(
            visited__friends__exists=True).count())

        self.assertEqual(1, Log.objects(
            actions__friends__operation='drink',
            actions__friends__object='beer').count())

    def test_map_field_unicode(self):
        # Non-ASCII map keys must survive a save/reload round-trip.
        class Info(EmbeddedDocument):
            description = StringField()
            value_list = ListField(field=StringField())

        class BlogPost(Document):
            info_dict = MapField(field=EmbeddedDocumentField(Info))

        BlogPost.drop_collection()

        tree = BlogPost(info_dict={
            u"éééé": {
                'description': u"VALUE: éééé"
            }
        })

        tree.save()

        self.assertEqual(
            BlogPost.objects.get(id=tree.id).info_dict[u"éééé"].description,
            u"VALUE: éééé"
        )
							
								
								
									
										219
									
								
								tests/fields/test_reference_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										219
									
								
								tests/fields/test_reference_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,219 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | from bson import SON, DBRef | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestReferenceField(MongoDBTestCase):
    """Tests for ReferenceField: validation, ObjectId vs DBRef storage, and
    query conversion."""

    def test_reference_validation(self):
        """Ensure that invalid document objects cannot be assigned to
        reference fields.
        """

        class User(Document):
            name = StringField()

        class BlogPost(Document):
            content = StringField()
            author = ReferenceField(User)

        User.drop_collection()
        BlogPost.drop_collection()

        # Make sure ReferenceField only accepts a document class or a string
        # with a document class name.
        self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument)

        user = User(name='Test User')

        # Ensure that the referenced object must have been saved
        post1 = BlogPost(content='Chips and gravy taste good.')
        post1.author = user
        self.assertRaises(ValidationError, post1.save)

        # Check that an invalid object type cannot be used
        post2 = BlogPost(content='Chips and chilli taste good.')
        post1.author = post2
        self.assertRaises(ValidationError, post1.validate)

        # Ensure ObjectID's are accepted as references
        user_object_id = user.pk
        post3 = BlogPost(content="Chips and curry sauce taste good.")
        post3.author = user_object_id
        post3.save()

        # Make sure referencing a saved document of the right type works
        user.save()
        post1.author = user
        post1.save()

        # Make sure referencing a saved document of the *wrong* type fails
        post2.save()
        post1.author = post2
        self.assertRaises(ValidationError, post1.validate)

    def test_objectid_reference_fields(self):
        """Make sure storing Object ID references works."""

        class Person(Document):
            name = StringField()
            parent = ReferenceField('self')

        Person.drop_collection()

        p1 = Person(name="John").save()
        Person(name="Ross", parent=p1.pk).save()

        p = Person.objects.get(name="Ross")
        self.assertEqual(p.parent, p1)

    def test_dbref_reference_fields(self):
        """Make sure storing references as bson.dbref.DBRef works."""

        class Person(Document):
            name = StringField()
            parent = ReferenceField('self', dbref=True)

        Person.drop_collection()

        p1 = Person(name="John").save()
        Person(name="Ross", parent=p1).save()

        # With dbref=True the raw document must hold a full DBRef.
        self.assertEqual(
            Person._get_collection().find_one({'name': 'Ross'})['parent'],
            DBRef('person', p1.pk)
        )

        p = Person.objects.get(name="Ross")
        self.assertEqual(p.parent, p1)

    def test_dbref_to_mongo(self):
        """Make sure that calling to_mongo on a ReferenceField which
        has dbref=False, but actually contains a DBRef, returns
        the ID of that DBRef.
        """

        class Person(Document):
            name = StringField()
            parent = ReferenceField('self', dbref=False)

        p = Person(
            name='Steve',
            parent=DBRef('person', 'abcdefghijklmnop')
        )
        self.assertEqual(p.to_mongo(), SON([
            ('name', u'Steve'),
            ('parent', 'abcdefghijklmnop')
        ]))

    def test_objectid_reference_fields_with_dbref_false(self):
        """With dbref=False the reference is stored as a bare ObjectId.

        NOTE(review): renamed from `test_objectid_reference_fields` -- the
        duplicate definition silently shadowed the earlier test of the same
        name, so that test never ran.
        """
        class Person(Document):
            name = StringField()
            parent = ReferenceField('self', dbref=False)

        Person.drop_collection()

        p1 = Person(name="John").save()
        Person(name="Ross", parent=p1).save()

        col = Person._get_collection()
        data = col.find_one({'name': 'Ross'})
        self.assertEqual(data['parent'], p1.pk)

        p = Person.objects.get(name="Ross")
        self.assertEqual(p.parent, p1)

    def test_undefined_reference(self):
        """Ensure that ReferenceFields may reference undefined Documents.
        """
        class Product(Document):
            name = StringField()
            company = ReferenceField('Company')

        class Company(Document):
            name = StringField()

        Product.drop_collection()
        Company.drop_collection()

        ten_gen = Company(name='10gen')
        ten_gen.save()
        mongodb = Product(name='MongoDB', company=ten_gen)
        mongodb.save()

        me = Product(name='MongoEngine')
        me.save()

        obj = Product.objects(company=ten_gen).first()
        self.assertEqual(obj, mongodb)
        self.assertEqual(obj.company, ten_gen)

        # Documents with no reference set must match company=None.
        obj = Product.objects(company=None).first()
        self.assertEqual(obj, me)

        obj = Product.objects.get(company=None)
        self.assertEqual(obj, me)

    def test_reference_query_conversion(self):
        """Ensure that ReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """
        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = ReferenceField(Member, dbref=False)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title='post 1', author=m1)
        post1.save()

        post2 = BlogPost(title='post 2', author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)

    def test_reference_query_conversion_dbref(self):
        """Same as test_reference_query_conversion, but the reference is
        stored as a DBRef (dbref=True)."""
        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = ReferenceField(Member, dbref=True)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title='post 1', author=m1)
        post1.save()

        post2 = BlogPost(title='post 2', author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)
							
								
								
									
										271
									
								
								tests/fields/test_sequence_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										271
									
								
								tests/fields/test_sequence_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,271 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestSequenceField(MongoDBTestCase):
    """Tests for SequenceField counter behaviour.

    Counters live in the 'mongoengine.counters' collection; each counter
    document's _id is '<sequence_name>.<field_name>' and its 'next' value
    is the last id handed out.
    """

    def test_sequence_field(self):
        """A primary-key SequenceField hands out 1-based sequential ids."""
        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        # list(...) so the comparison also holds on Python 3, where
        # range() is a lazy sequence rather than a list.
        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, list(range(1, 11)))

        # Reading documents back must not advance the counter.
        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        # set_next_value overwrites the stored counter directly.
        Person.id.set_next_value(1000)
        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 1000)

    def test_sequence_field_get_next_value(self):
        """get_next_value() reports the upcoming id."""
        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        self.assertEqual(Person.id.get_next_value(), 11)
        self.db['mongoengine.counters'].drop()

        # With no counter document the sequence restarts at 1.
        self.assertEqual(Person.id.get_next_value(), 1)

        class Person(Document):
            id = SequenceField(primary_key=True, value_decorator=str)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        # value_decorator is applied to the value get_next_value returns.
        self.assertEqual(Person.id.get_next_value(), '11')
        self.db['mongoengine.counters'].drop()

        self.assertEqual(Person.id.get_next_value(), '1')

    def test_sequence_field_sequence_name(self):
        """sequence_name overrides the collection-derived counter prefix."""
        class Person(Document):
            id = SequenceField(primary_key=True, sequence_name='jelly')
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
        self.assertEqual(c['next'], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, list(range(1, 11)))

        c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
        self.assertEqual(c['next'], 10)

        Person.id.set_next_value(1000)
        c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'})
        self.assertEqual(c['next'], 1000)

    def test_multiple_sequence_fields(self):
        """Two SequenceFields on one document keep independent counters."""
        class Person(Document):
            id = SequenceField(primary_key=True)
            counter = SequenceField()
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, list(range(1, 11)))

        counters = [i.counter for i in Person.objects]
        self.assertEqual(counters, list(range(1, 11)))

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        Person.id.set_next_value(1000)
        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 1000)

        # Setting one field's counter leaves the other field's untouched.
        Person.counter.set_next_value(999)
        c = self.db['mongoengine.counters'].find_one({'_id': 'person.counter'})
        self.assertEqual(c['next'], 999)

    def test_sequence_fields_reload(self):
        """reload() keeps the allocated value; clearing the field
        re-allocates on the next access."""
        class Animal(Document):
            counter = SequenceField()
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Animal.drop_collection()

        a = Animal(name="Boi").save()

        self.assertEqual(a.counter, 1)
        a.reload()
        self.assertEqual(a.counter, 1)

        # Setting the field to None makes the next read pull a fresh value.
        a.counter = None
        self.assertEqual(a.counter, 2)
        a.save()

        self.assertEqual(a.counter, 2)

        a = Animal.objects.first()
        self.assertEqual(a.counter, 2)
        a.reload()
        self.assertEqual(a.counter, 2)

    def test_multiple_sequence_fields_on_docs(self):
        """Different document classes use separate counters."""
        class Animal(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Animal.drop_collection()
        Person.drop_collection()

        for x in range(10):
            Animal(name="Animal %s" % x).save()
            Person(name="Person %s" % x).save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
        self.assertEqual(c['next'], 10)

        person_ids = [i.id for i in Person.objects]
        self.assertEqual(person_ids, list(range(1, 11)))

        # Renamed from `id` so the local does not shadow the builtin.
        animal_ids = [i.id for i in Animal.objects]
        self.assertEqual(animal_ids, list(range(1, 11)))

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'})
        self.assertEqual(c['next'], 10)

    def test_sequence_field_value_decorator(self):
        """value_decorator transforms each id before it is assigned."""
        class Person(Document):
            id = SequenceField(primary_key=True, value_decorator=str)
            name = StringField()

        self.db['mongoengine.counters'].drop()
        Person.drop_collection()

        for x in range(10):
            p = Person(name="Person %s" % x)
            p.save()

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

        # An explicit list comprehension: map() is lazy on Python 3, so
        # comparing a list to a map object would always fail there.
        ids = [i.id for i in Person.objects]
        self.assertEqual(ids, [str(i) for i in range(1, 11)])

        c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'})
        self.assertEqual(c['next'], 10)

    def test_embedded_sequence_field(self):
        """SequenceFields inside embedded documents share one counter
        keyed on the embedded class name."""
        class Comment(EmbeddedDocument):
            id = SequenceField()
            content = StringField(required=True)

        class Post(Document):
            title = StringField(required=True)
            comments = ListField(EmbeddedDocumentField(Comment))

        self.db['mongoengine.counters'].drop()
        Post.drop_collection()

        Post(title="MongoEngine",
             comments=[Comment(content="NoSQL Rocks"),
                       Comment(content="MongoEngine Rocks")]).save()
        c = self.db['mongoengine.counters'].find_one({'_id': 'comment.id'})
        self.assertEqual(c['next'], 2)
        post = Post.objects.first()
        self.assertEqual(1, post.comments[0].id)
        self.assertEqual(2, post.comments[1].id)

    def test_inherited_sequencefield(self):
        """A SequenceField declared on an abstract base shares a single
        'base.counter' sequence across all concrete subclasses."""
        class Base(Document):
            name = StringField()
            counter = SequenceField()
            meta = {'abstract': True}

        class Foo(Base):
            pass

        class Bar(Base):
            pass

        bar = Bar(name='Bar')
        bar.save()

        foo = Foo(name='Foo')
        foo.save()

        counter_ids = self.db['mongoengine.counters'].find().distinct('_id')
        self.assertIn('base.counter', counter_ids)
        # Bug fix: the original asserted ('foo.counter' or 'bar.counter'),
        # which short-circuits to 'foo.counter' alone and never checked
        # 'bar.counter'. Check both keys explicitly.
        self.assertNotIn('foo.counter', counter_ids)
        self.assertNotIn('bar.counter', counter_ids)
        self.assertNotEqual(foo.counter, bar.counter)
        self.assertEqual(foo._fields['counter'].owner_document, Base)
        self.assertEqual(bar._fields['counter'].owner_document, Base)

    def test_no_inherited_sequencefield(self):
        """SequenceFields declared directly on each subclass keep
        per-class counters instead of a shared one."""
        class Base(Document):
            name = StringField()
            meta = {'abstract': True}

        class Foo(Base):
            counter = SequenceField()

        class Bar(Base):
            counter = SequenceField()

        bar = Bar(name='Bar')
        bar.save()

        foo = Foo(name='Foo')
        foo.save()

        counter_ids = self.db['mongoengine.counters'].find().distinct('_id')
        self.assertNotIn('base.counter', counter_ids)
        # Bug fix: the original asserted ('foo.counter' and 'bar.counter'),
        # which evaluates to 'bar.counter' alone and never checked
        # 'foo.counter'. Check both keys explicitly.
        self.assertIn('foo.counter', counter_ids)
        self.assertIn('bar.counter', counter_ids)
        self.assertEqual(foo.counter, bar.counter)
        self.assertEqual(foo._fields['counter'].owner_document, Foo)
        self.assertEqual(bar._fields['counter'].owner_document, Bar)
							
								
								
									
										59
									
								
								tests/fields/test_url_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										59
									
								
								tests/fields/test_url_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,59 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
class TestURLField(MongoDBTestCase):
    """Validation behaviour of URLField."""

    def test_validation(self):
        """Ensure that URLFields validate urls properly."""
        class Link(Document):
            url = URLField()

        doc = Link()
        doc.url = 'google'
        # A bare word is not a URL.
        with self.assertRaises(ValidationError):
            doc.validate()

        # A scheme, host and port make a valid URL.
        doc.url = 'http://www.google.com:8080'
        doc.validate()

    def test_unicode_url_validation(self):
        """Ensure unicode URLs are validated properly."""
        class Link(Document):
            url = URLField()

        doc = Link()
        doc.url = u'http://привет.com'

        # TODO fix URL validation - this *IS* a valid URL
        # For now we just want to make sure that the error message is correct
        with self.assertRaises(ValidationError) as err:
            doc.validate()
        expected = (u"ValidationError (Link:None) "
                    u"(Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])")
        self.assertEqual(unicode(err.exception), expected)

    def test_url_scheme_validation(self):
        """Ensure that URLFields validate urls with specific schemes properly.
        """
        class Link(Document):
            url = URLField()

        class SchemeLink(Document):
            url = URLField(schemes=['ws', 'irc'])

        # 'ws' is rejected by the default scheme list...
        plain = Link()
        plain.url = 'ws://google.com'
        with self.assertRaises(ValidationError):
            plain.validate()

        # ...but accepted once explicitly allowed.
        custom = SchemeLink()
        custom.url = 'ws://google.com'
        custom.validate()

    def test_underscore_allowed_in_domains_names(self):
        """Underscores in host names must pass validation."""
        class Link(Document):
            url = URLField()

        doc = Link()
        doc.url = 'https://san_leandro-ca.geebo.com'
        doc.validate()
							
								
								
									
										65
									
								
								tests/fields/test_uuid_field.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										65
									
								
								tests/fields/test_uuid_field.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,65 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | import uuid | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  |  | ||||||
|  | from tests.utils import MongoDBTestCase, get_as_pymongo | ||||||
|  |  | ||||||
|  |  | ||||||
# Shared fixture document: binary=False makes the UUID persist as its
# string form rather than a BSON Binary value.
class Person(Document):
    api_key = UUIDField(binary=False)
|  |  | ||||||
|  |  | ||||||
class TestUUIDField(MongoDBTestCase):
    """Storage and validation behaviour of UUIDField."""

    def test_storage(self):
        """A non-binary UUIDField is stored as its string representation."""
        uid = uuid.uuid4()
        person = Person(api_key=uid).save()
        self.assertEqual(
            get_as_pymongo(person),
            {'_id': person.id,
             'api_key': str(uid)
             }
            )

    def test_field_string(self):
        """Test UUID fields storing as String
        """
        Person.drop_collection()

        uu = uuid.uuid4()
        Person(api_key=uu).save()
        self.assertEqual(1, Person.objects(api_key=uu).count())
        self.assertEqual(uu, Person.objects.first().api_key)

        person = Person()
        valid = (uuid.uuid4(), uuid.uuid1())
        for api_key in valid:
            person.api_key = api_key
            person.validate()

        # First has a non-hex digit ('g'); second is one digit short.
        invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
                   '9d159858-549b-4975-9f98-dd2f987c113')
        for api_key in invalid:
            person.api_key = api_key
            self.assertRaises(ValidationError, person.validate)

    def test_field_binary(self):
        """Test UUID fields storing as Binary object."""
        # Bug fix: the original reused the module-level Person, whose
        # UUIDField has binary=False, so this test duplicated
        # test_field_string and never exercised binary storage. Use a
        # dedicated document with binary=True instead.
        class BinaryPerson(Document):
            api_key = UUIDField(binary=True)

        BinaryPerson.drop_collection()

        uu = uuid.uuid4()
        BinaryPerson(api_key=uu).save()
        self.assertEqual(1, BinaryPerson.objects(api_key=uu).count())
        self.assertEqual(uu, BinaryPerson.objects.first().api_key)

        person = BinaryPerson()
        valid = (uuid.uuid4(), uuid.uuid1())
        for api_key in valid:
            person.api_key = api_key
            person.validate()

        # First has a non-hex digit ('g'); second is one digit short.
        invalid = ('9d159858-549b-4975-9f98-dd2f987c113g',
                   '9d159858-549b-4975-9f98-dd2f987c113')
        for api_key in invalid:
            person.api_key = api_key
            self.assertRaises(ValidationError, person.validate)
| @@ -48,6 +48,7 @@ class PickleSignalsTest(Document): | |||||||
|     def post_delete(self, sender, document, **kwargs): |     def post_delete(self, sender, document, **kwargs): | ||||||
|         pickled = pickle.dumps(document) |         pickled = pickle.dumps(document) | ||||||
|  |  | ||||||
|  |  | ||||||
| signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest) | signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest) | ||||||
| signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest) | signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,6 +1,6 @@ | |||||||
| from transform import * | from .transform import * | ||||||
| from field_list import * | from .field_list import * | ||||||
| from queryset import * | from .queryset import * | ||||||
| from visitor import * | from .visitor import * | ||||||
| from geo import * | from .geo import * | ||||||
| from modify import * | from .modify import * | ||||||
|   | |||||||
| @@ -181,7 +181,7 @@ class OnlyExcludeAllTest(unittest.TestCase): | |||||||
|         employee.save() |         employee.save() | ||||||
|  |  | ||||||
|         obj = self.Person.objects(id=employee.id).only('age').get() |         obj = self.Person.objects(id=employee.id).only('age').get() | ||||||
|         self.assertTrue(isinstance(obj, Employee)) |         self.assertIsInstance(obj, Employee) | ||||||
|  |  | ||||||
|         # Check field names are looked up properly |         # Check field names are looked up properly | ||||||
|         obj = Employee.objects(id=employee.id).only('salary').get() |         obj = Employee.objects(id=employee.id).only('salary').get() | ||||||
| @@ -208,7 +208,7 @@ class OnlyExcludeAllTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|         post = BlogPost(content='Had a good coffee today...', various={'test_dynamic':{'some': True}}) |         post = BlogPost(content='Had a good coffee today...', various={'test_dynamic': {'some': True}}) | ||||||
|         post.author = User(name='Test User') |         post.author = User(name='Test User') | ||||||
|         post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] |         post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] | ||||||
|         post.save() |         post.save() | ||||||
| @@ -413,7 +413,6 @@ class OnlyExcludeAllTest(unittest.TestCase): | |||||||
|         numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get() |         numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get() | ||||||
|         self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) |         self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) | ||||||
|  |  | ||||||
|  |  | ||||||
|     def test_exclude_from_subclasses_docs(self): |     def test_exclude_from_subclasses_docs(self): | ||||||
|  |  | ||||||
|         class Base(Document): |         class Base(Document): | ||||||
| @@ -436,5 +435,6 @@ class OnlyExcludeAllTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         self.assertRaises(LookUpError, Base.objects.exclude, "made_up") |         self.assertRaises(LookUpError, Base.objects.exclude, "made_up") | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -3,7 +3,7 @@ import unittest | |||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
| from tests.utils import MongoDBTestCase, needs_mongodb_v3 | from tests.utils import MongoDBTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ("GeoQueriesTest",) | __all__ = ("GeoQueriesTest",) | ||||||
| @@ -70,9 +70,6 @@ class GeoQueriesTest(MongoDBTestCase): | |||||||
|         self.assertEqual(events.count(), 1) |         self.assertEqual(events.count(), 1) | ||||||
|         self.assertEqual(events[0], event2) |         self.assertEqual(events[0], event2) | ||||||
|  |  | ||||||
|     # $minDistance was added in MongoDB v2.6, but continued being buggy |  | ||||||
|     # until v3.0; skip for older versions |  | ||||||
|     @needs_mongodb_v3 |  | ||||||
|     def test_near_and_min_distance(self): |     def test_near_and_min_distance(self): | ||||||
|         """Ensure the "min_distance" operator works alongside the "near" |         """Ensure the "min_distance" operator works alongside the "near" | ||||||
|         operator. |         operator. | ||||||
| @@ -95,9 +92,9 @@ class GeoQueriesTest(MongoDBTestCase): | |||||||
|             location__within_distance=point_and_distance) |             location__within_distance=point_and_distance) | ||||||
|         self.assertEqual(events.count(), 2) |         self.assertEqual(events.count(), 2) | ||||||
|         events = list(events) |         events = list(events) | ||||||
|         self.assertTrue(event2 not in events) |         self.assertNotIn(event2, events) | ||||||
|         self.assertTrue(event1 in events) |         self.assertIn(event1, events) | ||||||
|         self.assertTrue(event3 in events) |         self.assertIn(event3, events) | ||||||
|  |  | ||||||
|         # find events within 10 degrees of san francisco |         # find events within 10 degrees of san francisco | ||||||
|         point_and_distance = [[-122.415579, 37.7566023], 10] |         point_and_distance = [[-122.415579, 37.7566023], 10] | ||||||
| @@ -243,9 +240,6 @@ class GeoQueriesTest(MongoDBTestCase): | |||||||
|         events = self.Event.objects(location__geo_within_polygon=polygon2) |         events = self.Event.objects(location__geo_within_polygon=polygon2) | ||||||
|         self.assertEqual(events.count(), 0) |         self.assertEqual(events.count(), 0) | ||||||
|  |  | ||||||
|     # $minDistance was added in MongoDB v2.6, but continued being buggy |  | ||||||
|     # until v3.0; skip for older versions |  | ||||||
|     @needs_mongodb_v3 |  | ||||||
|     def test_2dsphere_near_and_min_max_distance(self): |     def test_2dsphere_near_and_min_max_distance(self): | ||||||
|         """Ensure "min_distace" and "max_distance" operators work well |         """Ensure "min_distace" and "max_distance" operators work well | ||||||
|         together with the "near" operator in a 2dsphere index. |         together with the "near" operator in a 2dsphere index. | ||||||
| @@ -285,9 +279,9 @@ class GeoQueriesTest(MongoDBTestCase): | |||||||
|             location__geo_within_center=point_and_distance) |             location__geo_within_center=point_and_distance) | ||||||
|         self.assertEqual(events.count(), 2) |         self.assertEqual(events.count(), 2) | ||||||
|         events = list(events) |         events = list(events) | ||||||
|         self.assertTrue(event2 not in events) |         self.assertNotIn(event2, events) | ||||||
|         self.assertTrue(event1 in events) |         self.assertIn(event1, events) | ||||||
|         self.assertTrue(event3 in events) |         self.assertIn(event3, events) | ||||||
|  |  | ||||||
|     def _test_embedded(self, point_field_class): |     def _test_embedded(self, point_field_class): | ||||||
|         """Helper test method ensuring given point field class works |         """Helper test method ensuring given point field class works | ||||||
| @@ -328,8 +322,6 @@ class GeoQueriesTest(MongoDBTestCase): | |||||||
|         """Make sure PointField works properly in an embedded document.""" |         """Make sure PointField works properly in an embedded document.""" | ||||||
|         self._test_embedded(point_field_class=PointField) |         self._test_embedded(point_field_class=PointField) | ||||||
|  |  | ||||||
|     # Needs MongoDB > 2.6.4 https://jira.mongodb.org/browse/SERVER-14039 |  | ||||||
|     @needs_mongodb_v3 |  | ||||||
|     def test_spherical_geospatial_operators(self): |     def test_spherical_geospatial_operators(self): | ||||||
|         """Ensure that spherical geospatial queries are working.""" |         """Ensure that spherical geospatial queries are working.""" | ||||||
|         class Point(Document): |         class Point(Document): | ||||||
| @@ -534,11 +526,11 @@ class GeoQueriesTest(MongoDBTestCase): | |||||||
|  |  | ||||||
|         Location.drop_collection() |         Location.drop_collection() | ||||||
|  |  | ||||||
|         Location(loc=[1,2]).save() |         Location(loc=[1, 2]).save() | ||||||
|         loc = Location.objects.as_pymongo()[0] |         loc = Location.objects.as_pymongo()[0] | ||||||
|         self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [1, 2]}) |         self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [1, 2]}) | ||||||
|  |  | ||||||
|         Location.objects.update(set__loc=[2,1]) |         Location.objects.update(set__loc=[2, 1]) | ||||||
|         loc = Location.objects.as_pymongo()[0] |         loc = Location.objects.as_pymongo()[0] | ||||||
|         self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [2, 1]}) |         self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [2, 1]}) | ||||||
|  |  | ||||||
|   | |||||||
| @@ -2,8 +2,6 @@ import unittest | |||||||
|  |  | ||||||
| from mongoengine import connect, Document, IntField, StringField, ListField | from mongoengine import connect, Document, IntField, StringField, ListField | ||||||
|  |  | ||||||
| from tests.utils import needs_mongodb_v26 |  | ||||||
|  |  | ||||||
| __all__ = ("FindAndModifyTest",) | __all__ = ("FindAndModifyTest",) | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -96,7 +94,6 @@ class FindAndModifyTest(unittest.TestCase): | |||||||
|         self.assertEqual(old_doc.to_mongo(), {"_id": 1}) |         self.assertEqual(old_doc.to_mongo(), {"_id": 1}) | ||||||
|         self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) |         self.assertDbEqual([{"_id": 0, "value": 0}, {"_id": 1, "value": -1}]) | ||||||
|  |  | ||||||
|     @needs_mongodb_v26 |  | ||||||
|     def test_modify_with_push(self): |     def test_modify_with_push(self): | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
|             tags = ListField(StringField()) |             tags = ListField(StringField()) | ||||||
|   | |||||||
| @@ -6,10 +6,12 @@ from mongoengine.connection import connect | |||||||
|  |  | ||||||
| __author__ = 'stas' | __author__ = 'stas' | ||||||
|  |  | ||||||
|  |  | ||||||
| class Person(Document): | class Person(Document): | ||||||
|     name = StringField() |     name = StringField() | ||||||
|     age = IntField() |     age = IntField() | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestQuerysetPickable(unittest.TestCase): | class TestQuerysetPickable(unittest.TestCase): | ||||||
|     """ |     """ | ||||||
|     Test for adding pickling support for QuerySet instances |     Test for adding pickling support for QuerySet instances | ||||||
| @@ -18,7 +20,7 @@ class TestQuerysetPickable(unittest.TestCase): | |||||||
|     def setUp(self): |     def setUp(self): | ||||||
|         super(TestQuerysetPickable, self).setUp() |         super(TestQuerysetPickable, self).setUp() | ||||||
|  |  | ||||||
|         connection = connect(db="test") #type: pymongo.mongo_client.MongoClient |         connection = connect(db="test")  # type: pymongo.mongo_client.MongoClient | ||||||
|  |  | ||||||
|         connection.drop_database("test") |         connection.drop_database("test") | ||||||
|  |  | ||||||
| @@ -27,7 +29,6 @@ class TestQuerysetPickable(unittest.TestCase): | |||||||
|             age=21 |             age=21 | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|  |  | ||||||
|     def test_picke_simple_qs(self): |     def test_picke_simple_qs(self): | ||||||
|  |  | ||||||
|         qs = Person.objects.all() |         qs = Person.objects.all() | ||||||
| @@ -46,10 +47,10 @@ class TestQuerysetPickable(unittest.TestCase): | |||||||
|  |  | ||||||
|         self.assertEqual(qs.count(), loadedQs.count()) |         self.assertEqual(qs.count(), loadedQs.count()) | ||||||
|  |  | ||||||
|         #can update loadedQs |         # can update loadedQs | ||||||
|         loadedQs.update(age=23) |         loadedQs.update(age=23) | ||||||
|  |  | ||||||
|         #check |         # check | ||||||
|         self.assertEqual(Person.objects.first().age, 23) |         self.assertEqual(Person.objects.first().age, 23) | ||||||
|  |  | ||||||
|     def test_pickle_support_filtration(self): |     def test_pickle_support_filtration(self): | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -48,14 +48,14 @@ class TransformTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")): |         for k, v in (("set", "$set"), ("set_on_insert", "$setOnInsert"), ("push", "$push")): | ||||||
|             update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc}) |             update = transform.update(DicDoc, **{"%s__dictField__test" % k: doc}) | ||||||
|             self.assertTrue(isinstance(update[v]["dictField.test"], dict)) |             self.assertIsInstance(update[v]["dictField.test"], dict) | ||||||
|  |  | ||||||
|         # Update special cases |         # Update special cases | ||||||
|         update = transform.update(DicDoc, unset__dictField__test=doc) |         update = transform.update(DicDoc, unset__dictField__test=doc) | ||||||
|         self.assertEqual(update["$unset"]["dictField.test"], 1) |         self.assertEqual(update["$unset"]["dictField.test"], 1) | ||||||
|  |  | ||||||
|         update = transform.update(DicDoc, pull__dictField__test=doc) |         update = transform.update(DicDoc, pull__dictField__test=doc) | ||||||
|         self.assertTrue(isinstance(update["$pull"]["dictField"]["test"], dict)) |         self.assertIsInstance(update["$pull"]["dictField"]["test"], dict) | ||||||
|  |  | ||||||
|         update = transform.update(LisDoc, pull__foo__in=['a']) |         update = transform.update(LisDoc, pull__foo__in=['a']) | ||||||
|         self.assertEqual(update, {'$pull': {'foo': {'$in': ['a']}}}) |         self.assertEqual(update, {'$pull': {'foo': {'$in': ['a']}}}) | ||||||
| @@ -71,6 +71,14 @@ class TransformTest(unittest.TestCase): | |||||||
|         update = transform.update(BlogPost, push_all__tags=['mongo', 'db']) |         update = transform.update(BlogPost, push_all__tags=['mongo', 'db']) | ||||||
|         self.assertEqual(update, {'$push': {'tags': {'$each': ['mongo', 'db']}}}) |         self.assertEqual(update, {'$push': {'tags': {'$each': ['mongo', 'db']}}}) | ||||||
|  |  | ||||||
|  |     def test_transform_update_no_operator_default_to_set(self): | ||||||
|  |         """Ensure the differences in behvaior between 'push' and 'push_all'""" | ||||||
|  |         class BlogPost(Document): | ||||||
|  |             tags = ListField(StringField()) | ||||||
|  |  | ||||||
|  |         update = transform.update(BlogPost, tags=['mongo', 'db']) | ||||||
|  |         self.assertEqual(update, {'$set': {'tags': ['mongo', 'db']}}) | ||||||
|  |  | ||||||
|     def test_query_field_name(self): |     def test_query_field_name(self): | ||||||
|         """Ensure that the correct field name is used when querying. |         """Ensure that the correct field name is used when querying. | ||||||
|         """ |         """ | ||||||
| @@ -88,17 +96,15 @@ class TransformTest(unittest.TestCase): | |||||||
|         post = BlogPost(**data) |         post = BlogPost(**data) | ||||||
|         post.save() |         post.save() | ||||||
|  |  | ||||||
|         self.assertTrue('postTitle' in |         self.assertIn('postTitle', BlogPost.objects(title=data['title'])._query) | ||||||
|                         BlogPost.objects(title=data['title'])._query) |  | ||||||
|         self.assertFalse('title' in |         self.assertFalse('title' in | ||||||
|                          BlogPost.objects(title=data['title'])._query) |                          BlogPost.objects(title=data['title'])._query) | ||||||
|         self.assertEqual(BlogPost.objects(title=data['title']).count(), 1) |         self.assertEqual(BlogPost.objects(title=data['title']).count(), 1) | ||||||
|  |  | ||||||
|         self.assertTrue('_id' in BlogPost.objects(pk=post.id)._query) |         self.assertIn('_id', BlogPost.objects(pk=post.id)._query) | ||||||
|         self.assertEqual(BlogPost.objects(pk=post.id).count(), 1) |         self.assertEqual(BlogPost.objects(pk=post.id).count(), 1) | ||||||
|  |  | ||||||
|         self.assertTrue('postComments.commentContent' in |         self.assertIn('postComments.commentContent', BlogPost.objects(comments__content='test')._query) | ||||||
|                         BlogPost.objects(comments__content='test')._query) |  | ||||||
|         self.assertEqual(BlogPost.objects(comments__content='test').count(), 1) |         self.assertEqual(BlogPost.objects(comments__content='test').count(), 1) | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
| @@ -116,8 +122,8 @@ class TransformTest(unittest.TestCase): | |||||||
|         post = BlogPost(**data) |         post = BlogPost(**data) | ||||||
|         post.save() |         post.save() | ||||||
|  |  | ||||||
|         self.assertTrue('_id' in BlogPost.objects(pk=data['title'])._query) |         self.assertIn('_id', BlogPost.objects(pk=data['title'])._query) | ||||||
|         self.assertTrue('_id' in BlogPost.objects(title=data['title'])._query) |         self.assertIn('_id', BlogPost.objects(title=data['title'])._query) | ||||||
|         self.assertEqual(BlogPost.objects(pk=data['title']).count(), 1) |         self.assertEqual(BlogPost.objects(pk=data['title']).count(), 1) | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
| @@ -285,6 +291,11 @@ class TransformTest(unittest.TestCase): | |||||||
|         update = transform.update(MainDoc, pull__content__heading='xyz') |         update = transform.update(MainDoc, pull__content__heading='xyz') | ||||||
|         self.assertEqual(update, {'$pull': {'content.heading': 'xyz'}}) |         self.assertEqual(update, {'$pull': {'content.heading': 'xyz'}}) | ||||||
|  |  | ||||||
|  |         update = transform.update(MainDoc, pull__content__text__word__in=['foo', 'bar']) | ||||||
|  |         self.assertEqual(update, {'$pull': {'content.text': {'word': {'$in': ['foo', 'bar']}}}}) | ||||||
|  |  | ||||||
|  |         update = transform.update(MainDoc, pull__content__text__word__nin=['foo', 'bar']) | ||||||
|  |         self.assertEqual(update, {'$pull': {'content.text': {'word': {'$nin': ['foo', 'bar']}}}}) | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -196,7 +196,7 @@ class QTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         test2 = test.clone() |         test2 = test.clone() | ||||||
|         self.assertEqual(test2.count(), 3) |         self.assertEqual(test2.count(), 3) | ||||||
|         self.assertFalse(test2 == test) |         self.assertNotEqual(test2, test) | ||||||
|  |  | ||||||
|         test3 = test2.filter(x=6) |         test3 = test2.filter(x=6) | ||||||
|         self.assertEqual(test3.count(), 1) |         self.assertEqual(test3.count(), 1) | ||||||
| @@ -275,7 +275,6 @@ class QTest(unittest.TestCase): | |||||||
|         with self.assertRaises(InvalidQueryError): |         with self.assertRaises(InvalidQueryError): | ||||||
|             self.Person.objects.filter('user1') |             self.Person.objects.filter('user1') | ||||||
|  |  | ||||||
|  |  | ||||||
|     def test_q_regex(self): |     def test_q_regex(self): | ||||||
|         """Ensure that Q objects can be queried using regexes. |         """Ensure that Q objects can be queried using regexes. | ||||||
|         """ |         """ | ||||||
| @@ -296,6 +295,18 @@ class QTest(unittest.TestCase): | |||||||
|         obj = self.Person.objects(Q(name__not=re.compile('^Gui'))).first() |         obj = self.Person.objects(Q(name__not=re.compile('^Gui'))).first() | ||||||
|         self.assertEqual(obj, None) |         self.assertEqual(obj, None) | ||||||
|  |  | ||||||
|  |     def test_q_repr(self): | ||||||
|  |         self.assertEqual(repr(Q()), 'Q(**{})') | ||||||
|  |         self.assertEqual(repr(Q(name='test')), "Q(**{'name': 'test'})") | ||||||
|  |  | ||||||
|  |         self.assertEqual( | ||||||
|  |             repr(Q(name='test') & Q(age__gte=18)), | ||||||
|  |             "(Q(**{'name': 'test'}) & Q(**{'age__gte': 18}))") | ||||||
|  |  | ||||||
|  |         self.assertEqual( | ||||||
|  |             repr(Q(name='test') | Q(age__gte=18)), | ||||||
|  |             "(Q(**{'name': 'test'}) | Q(**{'age__gte': 18}))") | ||||||
|  |  | ||||||
|     def test_q_lists(self): |     def test_q_lists(self): | ||||||
|         """Ensure that Q objects query ListFields correctly. |         """Ensure that Q objects query ListFields correctly. | ||||||
|         """ |         """ | ||||||
|   | |||||||
							
								
								
									
										15
									
								
								tests/test_common.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										15
									
								
								tests/test_common.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,15 @@ | |||||||
|  | import unittest | ||||||
|  |  | ||||||
|  | from mongoengine.common import _import_class | ||||||
|  | from mongoengine import Document | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestCommon(unittest.TestCase): | ||||||
|  |  | ||||||
|  |     def test__import_class(self): | ||||||
|  |         doc_cls = _import_class("Document") | ||||||
|  |         self.assertIs(doc_cls, Document) | ||||||
|  |  | ||||||
|  |     def test__import_class_raise_if_not_known(self): | ||||||
|  |         with self.assertRaises(ValueError): | ||||||
|  |             _import_class("UnknownClass") | ||||||
| @@ -1,5 +1,8 @@ | |||||||
| import datetime | import datetime | ||||||
| from pymongo.errors import OperationFailure |  | ||||||
|  | from pymongo import MongoClient | ||||||
|  | from pymongo.errors import OperationFailure, InvalidName | ||||||
|  | from pymongo import ReadPreference | ||||||
|  |  | ||||||
| try: | try: | ||||||
|     import unittest2 as unittest |     import unittest2 as unittest | ||||||
| @@ -12,23 +15,27 @@ from bson.tz_util import utc | |||||||
|  |  | ||||||
| from mongoengine import ( | from mongoengine import ( | ||||||
|     connect, register_connection, |     connect, register_connection, | ||||||
|     Document, DateTimeField |     Document, DateTimeField, | ||||||
| ) |     disconnect_all, StringField) | ||||||
| from mongoengine.python_support import IS_PYMONGO_3 |  | ||||||
| import mongoengine.connection | import mongoengine.connection | ||||||
| from mongoengine.connection import (MongoEngineConnectionError, get_db, | from mongoengine.connection import (MongoEngineConnectionError, get_db, | ||||||
|                                     get_connection) |                                     get_connection, disconnect, DEFAULT_DATABASE_NAME) | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_tz_awareness(connection): | def get_tz_awareness(connection): | ||||||
|     if not IS_PYMONGO_3: |     return connection.codec_options.tz_aware | ||||||
|         return connection.tz_aware |  | ||||||
|     else: |  | ||||||
|         return connection.codec_options.tz_aware |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ConnectionTest(unittest.TestCase): | class ConnectionTest(unittest.TestCase): | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def setUpClass(cls): | ||||||
|  |         disconnect_all() | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def tearDownClass(cls): | ||||||
|  |         disconnect_all() | ||||||
|  |  | ||||||
|     def tearDown(self): |     def tearDown(self): | ||||||
|         mongoengine.connection._connection_settings = {} |         mongoengine.connection._connection_settings = {} | ||||||
|         mongoengine.connection._connections = {} |         mongoengine.connection._connections = {} | ||||||
| @@ -39,15 +46,156 @@ class ConnectionTest(unittest.TestCase): | |||||||
|         connect('mongoenginetest') |         connect('mongoenginetest') | ||||||
|  |  | ||||||
|         conn = get_connection() |         conn = get_connection() | ||||||
|         self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) |         self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) | ||||||
|  |  | ||||||
|         db = get_db() |         db = get_db() | ||||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) |         self.assertIsInstance(db, pymongo.database.Database) | ||||||
|         self.assertEqual(db.name, 'mongoenginetest') |         self.assertEqual(db.name, 'mongoenginetest') | ||||||
|  |  | ||||||
|         connect('mongoenginetest2', alias='testdb') |         connect('mongoenginetest2', alias='testdb') | ||||||
|         conn = get_connection('testdb') |         conn = get_connection('testdb') | ||||||
|         self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) |         self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) | ||||||
|  |  | ||||||
|  |     def test_connect_disconnect_works_properly(self): | ||||||
|  |         class History1(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {'db_alias': 'db1'} | ||||||
|  |  | ||||||
|  |         class History2(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {'db_alias': 'db2'} | ||||||
|  |  | ||||||
|  |         connect('db1', alias='db1') | ||||||
|  |         connect('db2', alias='db2') | ||||||
|  |  | ||||||
|  |         History1.drop_collection() | ||||||
|  |         History2.drop_collection() | ||||||
|  |  | ||||||
|  |         h = History1(name='default').save() | ||||||
|  |         h1 = History2(name='db1').save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(list(History1.objects().as_pymongo()), | ||||||
|  |                          [{'_id': h.id, 'name': 'default'}]) | ||||||
|  |         self.assertEqual(list(History2.objects().as_pymongo()), | ||||||
|  |                          [{'_id': h1.id, 'name': 'db1'}]) | ||||||
|  |  | ||||||
|  |         disconnect('db1') | ||||||
|  |         disconnect('db2') | ||||||
|  |  | ||||||
|  |         with self.assertRaises(MongoEngineConnectionError): | ||||||
|  |             list(History1.objects().as_pymongo()) | ||||||
|  |  | ||||||
|  |         with self.assertRaises(MongoEngineConnectionError): | ||||||
|  |             list(History2.objects().as_pymongo()) | ||||||
|  |  | ||||||
|  |         connect('db1', alias='db1') | ||||||
|  |         connect('db2', alias='db2') | ||||||
|  |  | ||||||
|  |         self.assertEqual(list(History1.objects().as_pymongo()), | ||||||
|  |                          [{'_id': h.id, 'name': 'default'}]) | ||||||
|  |         self.assertEqual(list(History2.objects().as_pymongo()), | ||||||
|  |                          [{'_id': h1.id, 'name': 'db1'}]) | ||||||
|  |  | ||||||
|  |     def test_connect_different_documents_to_different_database(self): | ||||||
|  |         class History(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class History1(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {'db_alias': 'db1'} | ||||||
|  |  | ||||||
|  |         class History2(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {'db_alias': 'db2'} | ||||||
|  |  | ||||||
|  |         connect() | ||||||
|  |         connect('db1', alias='db1') | ||||||
|  |         connect('db2', alias='db2') | ||||||
|  |  | ||||||
|  |         History.drop_collection() | ||||||
|  |         History1.drop_collection() | ||||||
|  |         History2.drop_collection() | ||||||
|  |  | ||||||
|  |         h = History(name='default').save() | ||||||
|  |         h1 = History1(name='db1').save() | ||||||
|  |         h2 = History2(name='db2').save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(History._collection.database.name, DEFAULT_DATABASE_NAME) | ||||||
|  |         self.assertEqual(History1._collection.database.name, 'db1') | ||||||
|  |         self.assertEqual(History2._collection.database.name, 'db2') | ||||||
|  |  | ||||||
|  |         self.assertEqual(list(History.objects().as_pymongo()), | ||||||
|  |                          [{'_id': h.id, 'name': 'default'}]) | ||||||
|  |         self.assertEqual(list(History1.objects().as_pymongo()), | ||||||
|  |                          [{'_id': h1.id, 'name': 'db1'}]) | ||||||
|  |         self.assertEqual(list(History2.objects().as_pymongo()), | ||||||
|  |                          [{'_id': h2.id, 'name': 'db2'}]) | ||||||
|  |  | ||||||
|  |     def test_connect_fails_if_connect_2_times_with_default_alias(self): | ||||||
|  |         connect('mongoenginetest') | ||||||
|  |  | ||||||
|  |         with self.assertRaises(MongoEngineConnectionError) as ctx_err: | ||||||
|  |             connect('mongoenginetest2') | ||||||
|  |         self.assertEqual("A different connection with alias `default` was already registered. Use disconnect() first", str(ctx_err.exception)) | ||||||
|  |  | ||||||
|  |     def test_connect_fails_if_connect_2_times_with_custom_alias(self): | ||||||
|  |         connect('mongoenginetest', alias='alias1') | ||||||
|  |  | ||||||
|  |         with self.assertRaises(MongoEngineConnectionError) as ctx_err: | ||||||
|  |             connect('mongoenginetest2', alias='alias1') | ||||||
|  |  | ||||||
|  |         self.assertEqual("A different connection with alias `alias1` was already registered. Use disconnect() first", str(ctx_err.exception)) | ||||||
|  |  | ||||||
|  |     def test_connect_fails_if_similar_connection_settings_arent_defined_the_same_way(self): | ||||||
|  |         """Intended to keep the detecton function simple but robust""" | ||||||
|  |         db_name = 'mongoenginetest' | ||||||
|  |         db_alias = 'alias1' | ||||||
|  |         connect(db=db_name, alias=db_alias, host='localhost', port=27017) | ||||||
|  |  | ||||||
|  |         with self.assertRaises(MongoEngineConnectionError): | ||||||
|  |             connect(host='mongodb://localhost:27017/%s' % db_name, alias=db_alias) | ||||||
|  |  | ||||||
|  |     def test_connect_passes_silently_connect_multiple_times_with_same_config(self): | ||||||
|  |         # test default connection to `test` | ||||||
|  |         connect() | ||||||
|  |         connect() | ||||||
|  |         self.assertEqual(len(mongoengine.connection._connections), 1) | ||||||
|  |         connect('test01', alias='test01') | ||||||
|  |         connect('test01', alias='test01') | ||||||
|  |         self.assertEqual(len(mongoengine.connection._connections), 2) | ||||||
|  |         connect(host='mongodb://localhost:27017/mongoenginetest02', alias='test02') | ||||||
|  |         connect(host='mongodb://localhost:27017/mongoenginetest02', alias='test02') | ||||||
|  |         self.assertEqual(len(mongoengine.connection._connections), 3) | ||||||
|  |  | ||||||
|  |     def test_connect_with_invalid_db_name(self): | ||||||
|  |         """Ensure that connect() method fails fast if db name is invalid | ||||||
|  |         """ | ||||||
|  |         with self.assertRaises(InvalidName): | ||||||
|  |             connect('mongomock://localhost') | ||||||
|  |  | ||||||
|  |     def test_connect_with_db_name_external(self): | ||||||
|  |         """Ensure that connect() works if db name is $external | ||||||
|  |         """ | ||||||
|  |         """Ensure that the connect() method works properly.""" | ||||||
|  |         connect('$external') | ||||||
|  |  | ||||||
|  |         conn = get_connection() | ||||||
|  |         self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) | ||||||
|  |  | ||||||
|  |         db = get_db() | ||||||
|  |         self.assertIsInstance(db, pymongo.database.Database) | ||||||
|  |         self.assertEqual(db.name, '$external') | ||||||
|  |  | ||||||
|  |         connect('$external', alias='testdb') | ||||||
|  |         conn = get_connection('testdb') | ||||||
|  |         self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) | ||||||
|  |  | ||||||
|  |     def test_connect_with_invalid_db_name_type(self): | ||||||
|  |         """Ensure that connect() method fails fast if db name has invalid type | ||||||
|  |         """ | ||||||
|  |         with self.assertRaises(TypeError): | ||||||
|  |             non_string_db_name = ['e. g. list instead of a string'] | ||||||
|  |             connect(non_string_db_name) | ||||||
|  |  | ||||||
|     def test_connect_in_mocking(self): |     def test_connect_in_mocking(self): | ||||||
|         """Ensure that the connect() method works properly in mocking. |         """Ensure that the connect() method works properly in mocking. | ||||||
| @@ -59,31 +207,31 @@ class ConnectionTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         connect('mongoenginetest', host='mongomock://localhost') |         connect('mongoenginetest', host='mongomock://localhost') | ||||||
|         conn = get_connection() |         conn = get_connection() | ||||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) |         self.assertIsInstance(conn, mongomock.MongoClient) | ||||||
|  |  | ||||||
|         connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2') |         connect('mongoenginetest2', host='mongomock://localhost', alias='testdb2') | ||||||
|         conn = get_connection('testdb2') |         conn = get_connection('testdb2') | ||||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) |         self.assertIsInstance(conn, mongomock.MongoClient) | ||||||
|  |  | ||||||
|         connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3') |         connect('mongoenginetest3', host='mongodb://localhost', is_mock=True, alias='testdb3') | ||||||
|         conn = get_connection('testdb3') |         conn = get_connection('testdb3') | ||||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) |         self.assertIsInstance(conn, mongomock.MongoClient) | ||||||
|  |  | ||||||
|         connect('mongoenginetest4', is_mock=True, alias='testdb4') |         connect('mongoenginetest4', is_mock=True, alias='testdb4') | ||||||
|         conn = get_connection('testdb4') |         conn = get_connection('testdb4') | ||||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) |         self.assertIsInstance(conn, mongomock.MongoClient) | ||||||
|  |  | ||||||
|         connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5') |         connect(host='mongodb://localhost:27017/mongoenginetest5', is_mock=True, alias='testdb5') | ||||||
|         conn = get_connection('testdb5') |         conn = get_connection('testdb5') | ||||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) |         self.assertIsInstance(conn, mongomock.MongoClient) | ||||||
|  |  | ||||||
|         connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6') |         connect(host='mongomock://localhost:27017/mongoenginetest6', alias='testdb6') | ||||||
|         conn = get_connection('testdb6') |         conn = get_connection('testdb6') | ||||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) |         self.assertIsInstance(conn, mongomock.MongoClient) | ||||||
|  |  | ||||||
|         connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7') |         connect(host='mongomock://localhost:27017/mongoenginetest7', is_mock=True, alias='testdb7') | ||||||
|         conn = get_connection('testdb7') |         conn = get_connection('testdb7') | ||||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) |         self.assertIsInstance(conn, mongomock.MongoClient) | ||||||
|  |  | ||||||
|     def test_connect_with_host_list(self): |     def test_connect_with_host_list(self): | ||||||
|         """Ensure that the connect() method works when host is a list |         """Ensure that the connect() method works when host is a list | ||||||
| @@ -97,35 +245,155 @@ class ConnectionTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         connect(host=['mongomock://localhost']) |         connect(host=['mongomock://localhost']) | ||||||
|         conn = get_connection() |         conn = get_connection() | ||||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) |         self.assertIsInstance(conn, mongomock.MongoClient) | ||||||
|  |  | ||||||
|         connect(host=['mongodb://localhost'], is_mock=True,  alias='testdb2') |         connect(host=['mongodb://localhost'], is_mock=True, alias='testdb2') | ||||||
|         conn = get_connection('testdb2') |         conn = get_connection('testdb2') | ||||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) |         self.assertIsInstance(conn, mongomock.MongoClient) | ||||||
|  |  | ||||||
|         connect(host=['localhost'], is_mock=True,  alias='testdb3') |         connect(host=['localhost'], is_mock=True, alias='testdb3') | ||||||
|         conn = get_connection('testdb3') |         conn = get_connection('testdb3') | ||||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) |         self.assertIsInstance(conn, mongomock.MongoClient) | ||||||
|  |  | ||||||
|         connect(host=['mongomock://localhost:27017', 'mongomock://localhost:27018'], alias='testdb4') |         connect(host=['mongomock://localhost:27017', 'mongomock://localhost:27018'], alias='testdb4') | ||||||
|         conn = get_connection('testdb4') |         conn = get_connection('testdb4') | ||||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) |         self.assertIsInstance(conn, mongomock.MongoClient) | ||||||
|  |  | ||||||
|         connect(host=['mongodb://localhost:27017', 'mongodb://localhost:27018'], is_mock=True,  alias='testdb5') |         connect(host=['mongodb://localhost:27017', 'mongodb://localhost:27018'], is_mock=True, alias='testdb5') | ||||||
|         conn = get_connection('testdb5') |         conn = get_connection('testdb5') | ||||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) |         self.assertIsInstance(conn, mongomock.MongoClient) | ||||||
|  |  | ||||||
|         connect(host=['localhost:27017', 'localhost:27018'], is_mock=True,  alias='testdb6') |         connect(host=['localhost:27017', 'localhost:27018'], is_mock=True, alias='testdb6') | ||||||
|         conn = get_connection('testdb6') |         conn = get_connection('testdb6') | ||||||
|         self.assertTrue(isinstance(conn, mongomock.MongoClient)) |         self.assertIsInstance(conn, mongomock.MongoClient) | ||||||
|  |  | ||||||
|     def test_disconnect(self): |     def test_disconnect_cleans_globals(self): | ||||||
|         """Ensure that the disconnect() method works properly |         """Ensure that the disconnect() method cleans the globals objects""" | ||||||
|         """ |         connections = mongoengine.connection._connections | ||||||
|  |         dbs = mongoengine.connection._dbs | ||||||
|  |         connection_settings = mongoengine.connection._connection_settings | ||||||
|  |  | ||||||
|  |         connect('mongoenginetest') | ||||||
|  |  | ||||||
|  |         self.assertEqual(len(connections), 1) | ||||||
|  |         self.assertEqual(len(dbs), 0) | ||||||
|  |         self.assertEqual(len(connection_settings), 1) | ||||||
|  |  | ||||||
|  |         class TestDoc(Document): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         TestDoc.drop_collection()  # triggers the db | ||||||
|  |         self.assertEqual(len(dbs), 1) | ||||||
|  |  | ||||||
|  |         disconnect() | ||||||
|  |         self.assertEqual(len(connections), 0) | ||||||
|  |         self.assertEqual(len(dbs), 0) | ||||||
|  |         self.assertEqual(len(connection_settings), 0) | ||||||
|  |  | ||||||
|  |     def test_disconnect_cleans_cached_collection_attribute_in_document(self): | ||||||
|  |         """Ensure that the disconnect() method works properly""" | ||||||
|         conn1 = connect('mongoenginetest') |         conn1 = connect('mongoenginetest') | ||||||
|         mongoengine.connection.disconnect() |  | ||||||
|         conn2 = connect('mongoenginetest') |         class History(Document): | ||||||
|         self.assertTrue(conn1 is not conn2) |             pass | ||||||
|  |  | ||||||
|  |         self.assertIsNone(History._collection) | ||||||
|  |  | ||||||
|  |         History.drop_collection() | ||||||
|  |  | ||||||
|  |         History.objects.first()     # will trigger the caching of _collection attribute | ||||||
|  |         self.assertIsNotNone(History._collection) | ||||||
|  |  | ||||||
|  |         disconnect() | ||||||
|  |  | ||||||
|  |         self.assertIsNone(History._collection) | ||||||
|  |  | ||||||
|  |         with self.assertRaises(MongoEngineConnectionError) as ctx_err: | ||||||
|  |             History.objects.first() | ||||||
|  |         self.assertEqual("You have not defined a default connection", str(ctx_err.exception)) | ||||||
|  |  | ||||||
|  |     def test_connect_disconnect_works_on_same_document(self): | ||||||
|  |         """Ensure that the connect/disconnect works properly with a single Document""" | ||||||
|  |         db1 = 'db1' | ||||||
|  |         db2 = 'db2' | ||||||
|  |  | ||||||
|  |         # Ensure freshness of the 2 databases through pymongo | ||||||
|  |         client = MongoClient('localhost', 27017) | ||||||
|  |         client.drop_database(db1) | ||||||
|  |         client.drop_database(db2) | ||||||
|  |  | ||||||
|  |         # Save in db1 | ||||||
|  |         connect(db1) | ||||||
|  |  | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField(required=True) | ||||||
|  |  | ||||||
|  |         user1 = User(name='John is in db1').save() | ||||||
|  |         disconnect() | ||||||
|  |  | ||||||
|  |         # Make sure save doesnt work at this stage | ||||||
|  |         with self.assertRaises(MongoEngineConnectionError): | ||||||
|  |             User(name='Wont work').save() | ||||||
|  |  | ||||||
|  |         # Save in db2 | ||||||
|  |         connect(db2) | ||||||
|  |         user2 = User(name='Bob is in db2').save() | ||||||
|  |         disconnect() | ||||||
|  |  | ||||||
|  |         db1_users = list(client[db1].user.find()) | ||||||
|  |         self.assertEqual(db1_users, [{'_id': user1.id, 'name': 'John is in db1'}]) | ||||||
|  |         db2_users = list(client[db2].user.find()) | ||||||
|  |         self.assertEqual(db2_users, [{'_id': user2.id, 'name': 'Bob is in db2'}]) | ||||||
|  |  | ||||||
|  |     def test_disconnect_silently_pass_if_alias_does_not_exist(self): | ||||||
|  |         connections = mongoengine.connection._connections | ||||||
|  |         self.assertEqual(len(connections), 0) | ||||||
|  |         disconnect(alias='not_exist') | ||||||
|  |  | ||||||
|  |     def test_disconnect_all(self): | ||||||
|  |         connections = mongoengine.connection._connections | ||||||
|  |         dbs = mongoengine.connection._dbs | ||||||
|  |         connection_settings = mongoengine.connection._connection_settings | ||||||
|  |  | ||||||
|  |         connect('mongoenginetest') | ||||||
|  |         connect('mongoenginetest2', alias='db1') | ||||||
|  |  | ||||||
|  |         class History(Document): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class History1(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {'db_alias': 'db1'} | ||||||
|  |  | ||||||
|  |         History.drop_collection()   # will trigger the caching of _collection attribute | ||||||
|  |         History.objects.first() | ||||||
|  |         History1.drop_collection() | ||||||
|  |         History1.objects.first() | ||||||
|  |  | ||||||
|  |         self.assertIsNotNone(History._collection) | ||||||
|  |         self.assertIsNotNone(History1._collection) | ||||||
|  |  | ||||||
|  |         self.assertEqual(len(connections), 2) | ||||||
|  |         self.assertEqual(len(dbs), 2) | ||||||
|  |         self.assertEqual(len(connection_settings), 2) | ||||||
|  |  | ||||||
|  |         disconnect_all() | ||||||
|  |  | ||||||
|  |         self.assertIsNone(History._collection) | ||||||
|  |         self.assertIsNone(History1._collection) | ||||||
|  |  | ||||||
|  |         self.assertEqual(len(connections), 0) | ||||||
|  |         self.assertEqual(len(dbs), 0) | ||||||
|  |         self.assertEqual(len(connection_settings), 0) | ||||||
|  |  | ||||||
|  |         with self.assertRaises(MongoEngineConnectionError): | ||||||
|  |             History.objects.first() | ||||||
|  |  | ||||||
|  |         with self.assertRaises(MongoEngineConnectionError): | ||||||
|  |             History1.objects.first() | ||||||
|  |  | ||||||
|  |     def test_disconnect_all_silently_pass_if_no_connection_exist(self): | ||||||
|  |         disconnect_all() | ||||||
|  |  | ||||||
|     def test_sharing_connections(self): |     def test_sharing_connections(self): | ||||||
|         """Ensure that connections are shared when the connection settings are exactly the same |         """Ensure that connections are shared when the connection settings are exactly the same | ||||||
| @@ -136,41 +404,31 @@ class ConnectionTest(unittest.TestCase): | |||||||
|         connect('mongoenginetests', alias='testdb2') |         connect('mongoenginetests', alias='testdb2') | ||||||
|         actual_connection = get_connection('testdb2') |         actual_connection = get_connection('testdb2') | ||||||
|  |  | ||||||
|         # Handle PyMongo 3+ Async Connection |         expected_connection.server_info() | ||||||
|         if IS_PYMONGO_3: |  | ||||||
|             # Ensure we are connected, throws ServerSelectionTimeoutError otherwise. |  | ||||||
|             # Purposely not catching exception to fail test if thrown. |  | ||||||
|             expected_connection.server_info() |  | ||||||
|  |  | ||||||
|         self.assertEqual(expected_connection, actual_connection) |         self.assertEqual(expected_connection, actual_connection) | ||||||
|  |  | ||||||
|     def test_connect_uri(self): |     def test_connect_uri(self): | ||||||
|         """Ensure that the connect() method works properly with URIs.""" |         """Ensure that the connect() method works properly with URIs.""" | ||||||
|         c = connect(db='mongoenginetest', alias='admin') |         c = connect(db='mongoenginetest', alias='admin') | ||||||
|         c.admin.system.users.remove({}) |         c.admin.system.users.delete_many({}) | ||||||
|         c.mongoenginetest.system.users.remove({}) |         c.mongoenginetest.system.users.delete_many({}) | ||||||
|  |  | ||||||
|         c.admin.add_user("admin", "password") |         c.admin.command("createUser", "admin", pwd="password", roles=["root"]) | ||||||
|         c.admin.authenticate("admin", "password") |         c.admin.authenticate("admin", "password") | ||||||
|         c.mongoenginetest.add_user("username", "password") |         c.admin.command("createUser", "username", pwd="password", roles=["dbOwner"]) | ||||||
|  |  | ||||||
|         if not IS_PYMONGO_3: |  | ||||||
|             self.assertRaises( |  | ||||||
|                 MongoEngineConnectionError, connect, 'testdb_uri_bad', |  | ||||||
|                 host='mongodb://test:password@localhost' |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest') |         connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest') | ||||||
|  |  | ||||||
|         conn = get_connection() |         conn = get_connection() | ||||||
|         self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) |         self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) | ||||||
|  |  | ||||||
|         db = get_db() |         db = get_db() | ||||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) |         self.assertIsInstance(db, pymongo.database.Database) | ||||||
|         self.assertEqual(db.name, 'mongoenginetest') |         self.assertEqual(db.name, 'mongoenginetest') | ||||||
|  |  | ||||||
|         c.admin.system.users.remove({}) |         c.admin.system.users.delete_many({}) | ||||||
|         c.mongoenginetest.system.users.remove({}) |         c.mongoenginetest.system.users.delete_many({}) | ||||||
|  |  | ||||||
|     def test_connect_uri_without_db(self): |     def test_connect_uri_without_db(self): | ||||||
|         """Ensure connect() method works properly if the URI doesn't |         """Ensure connect() method works properly if the URI doesn't | ||||||
| @@ -179,10 +437,10 @@ class ConnectionTest(unittest.TestCase): | |||||||
|         connect("mongoenginetest", host='mongodb://localhost/') |         connect("mongoenginetest", host='mongodb://localhost/') | ||||||
|  |  | ||||||
|         conn = get_connection() |         conn = get_connection() | ||||||
|         self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) |         self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) | ||||||
|  |  | ||||||
|         db = get_db() |         db = get_db() | ||||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) |         self.assertIsInstance(db, pymongo.database.Database) | ||||||
|         self.assertEqual(db.name, 'mongoenginetest') |         self.assertEqual(db.name, 'mongoenginetest') | ||||||
|  |  | ||||||
|     def test_connect_uri_default_db(self): |     def test_connect_uri_default_db(self): | ||||||
| @@ -192,10 +450,10 @@ class ConnectionTest(unittest.TestCase): | |||||||
|         connect(host='mongodb://localhost/') |         connect(host='mongodb://localhost/') | ||||||
|  |  | ||||||
|         conn = get_connection() |         conn = get_connection() | ||||||
|         self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) |         self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) | ||||||
|  |  | ||||||
|         db = get_db() |         db = get_db() | ||||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) |         self.assertIsInstance(db, pymongo.database.Database) | ||||||
|         self.assertEqual(db.name, 'test') |         self.assertEqual(db.name, 'test') | ||||||
|  |  | ||||||
|     def test_uri_without_credentials_doesnt_override_conn_settings(self): |     def test_uri_without_credentials_doesnt_override_conn_settings(self): | ||||||
| @@ -217,23 +475,16 @@ class ConnectionTest(unittest.TestCase): | |||||||
|         """ |         """ | ||||||
|         # Create users |         # Create users | ||||||
|         c = connect('mongoenginetest') |         c = connect('mongoenginetest') | ||||||
|         c.admin.system.users.remove({}) |  | ||||||
|         c.admin.add_user('username2', 'password') |         c.admin.system.users.delete_many({}) | ||||||
|  |         c.admin.command("createUser", "username2", pwd="password", roles=["dbOwner"]) | ||||||
|  |  | ||||||
|         # Authentication fails without "authSource" |         # Authentication fails without "authSource" | ||||||
|         if IS_PYMONGO_3: |         test_conn = connect( | ||||||
|             test_conn = connect( |             'mongoenginetest', alias='test1', | ||||||
|                 'mongoenginetest', alias='test1', |             host='mongodb://username2:password@localhost/mongoenginetest' | ||||||
|                 host='mongodb://username2:password@localhost/mongoenginetest' |         ) | ||||||
|             ) |         self.assertRaises(OperationFailure, test_conn.server_info) | ||||||
|             self.assertRaises(OperationFailure, test_conn.server_info) |  | ||||||
|         else: |  | ||||||
|             self.assertRaises( |  | ||||||
|                 MongoEngineConnectionError, |  | ||||||
|                 connect, 'mongoenginetest', alias='test1', |  | ||||||
|                 host='mongodb://username2:password@localhost/mongoenginetest' |  | ||||||
|             ) |  | ||||||
|             self.assertRaises(MongoEngineConnectionError, get_db, 'test1') |  | ||||||
|  |  | ||||||
|         # Authentication succeeds with "authSource" |         # Authentication succeeds with "authSource" | ||||||
|         authd_conn = connect( |         authd_conn = connect( | ||||||
| @@ -242,11 +493,11 @@ class ConnectionTest(unittest.TestCase): | |||||||
|                   'mongoenginetest?authSource=admin') |                   'mongoenginetest?authSource=admin') | ||||||
|         ) |         ) | ||||||
|         db = get_db('test2') |         db = get_db('test2') | ||||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) |         self.assertIsInstance(db, pymongo.database.Database) | ||||||
|         self.assertEqual(db.name, 'mongoenginetest') |         self.assertEqual(db.name, 'mongoenginetest') | ||||||
|  |  | ||||||
|         # Clear all users |         # Clear all users | ||||||
|         authd_conn.admin.system.users.remove({}) |         authd_conn.admin.system.users.delete_many({}) | ||||||
|  |  | ||||||
|     def test_register_connection(self): |     def test_register_connection(self): | ||||||
|         """Ensure that connections with different aliases may be registered. |         """Ensure that connections with different aliases may be registered. | ||||||
| @@ -255,10 +506,10 @@ class ConnectionTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         self.assertRaises(MongoEngineConnectionError, get_connection) |         self.assertRaises(MongoEngineConnectionError, get_connection) | ||||||
|         conn = get_connection('testdb') |         conn = get_connection('testdb') | ||||||
|         self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) |         self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) | ||||||
|  |  | ||||||
|         db = get_db('testdb') |         db = get_db('testdb') | ||||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) |         self.assertIsInstance(db, pymongo.database.Database) | ||||||
|         self.assertEqual(db.name, 'mongoenginetest2') |         self.assertEqual(db.name, 'mongoenginetest2') | ||||||
|  |  | ||||||
|     def test_register_connection_defaults(self): |     def test_register_connection_defaults(self): | ||||||
| @@ -267,7 +518,7 @@ class ConnectionTest(unittest.TestCase): | |||||||
|         register_connection('testdb', 'mongoenginetest', host=None, port=None) |         register_connection('testdb', 'mongoenginetest', host=None, port=None) | ||||||
|  |  | ||||||
|         conn = get_connection('testdb') |         conn = get_connection('testdb') | ||||||
|         self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient)) |         self.assertIsInstance(conn, pymongo.mongo_client.MongoClient) | ||||||
|  |  | ||||||
|     def test_connection_kwargs(self): |     def test_connection_kwargs(self): | ||||||
|         """Ensure that connection kwargs get passed to pymongo.""" |         """Ensure that connection kwargs get passed to pymongo.""" | ||||||
| @@ -284,14 +535,7 @@ class ConnectionTest(unittest.TestCase): | |||||||
|         """Ensure we can specify a max connection pool size using |         """Ensure we can specify a max connection pool size using | ||||||
|         a connection kwarg. |         a connection kwarg. | ||||||
|         """ |         """ | ||||||
|         # Use "max_pool_size" or "maxpoolsize" depending on PyMongo version |         pool_size_kwargs = {'maxpoolsize': 100} | ||||||
|         # (former was changed to the latter as described in |  | ||||||
|         # https://jira.mongodb.org/browse/PYTHON-854). |  | ||||||
|         # TODO remove once PyMongo < 3.0 support is dropped |  | ||||||
|         if pymongo.version_tuple[0] >= 3: |  | ||||||
|             pool_size_kwargs = {'maxpoolsize': 100} |  | ||||||
|         else: |  | ||||||
|             pool_size_kwargs = {'max_pool_size': 100} |  | ||||||
|  |  | ||||||
|         conn = connect('mongoenginetest', alias='max_pool_size_via_kwarg', **pool_size_kwargs) |         conn = connect('mongoenginetest', alias='max_pool_size_via_kwarg', **pool_size_kwargs) | ||||||
|         self.assertEqual(conn.max_pool_size, 100) |         self.assertEqual(conn.max_pool_size, 100) | ||||||
| @@ -300,9 +544,6 @@ class ConnectionTest(unittest.TestCase): | |||||||
|         """Ensure we can specify a max connection pool size using |         """Ensure we can specify a max connection pool size using | ||||||
|         an option in a connection URI. |         an option in a connection URI. | ||||||
|         """ |         """ | ||||||
|         if pymongo.version_tuple[0] == 2 and pymongo.version_tuple[1] < 9: |  | ||||||
|             raise SkipTest('maxpoolsize as a URI option is only supported in PyMongo v2.9+') |  | ||||||
|  |  | ||||||
|         conn = connect(host='mongodb://localhost/test?maxpoolsize=100', alias='max_pool_size_via_uri') |         conn = connect(host='mongodb://localhost/test?maxpoolsize=100', alias='max_pool_size_via_uri') | ||||||
|         self.assertEqual(conn.max_pool_size, 100) |         self.assertEqual(conn.max_pool_size, 100) | ||||||
|  |  | ||||||
| @@ -312,46 +553,30 @@ class ConnectionTest(unittest.TestCase): | |||||||
|         """ |         """ | ||||||
|         conn1 = connect(alias='conn1', host='mongodb://localhost/testing?w=1&j=true') |         conn1 = connect(alias='conn1', host='mongodb://localhost/testing?w=1&j=true') | ||||||
|         conn2 = connect('testing', alias='conn2', w=1, j=True) |         conn2 = connect('testing', alias='conn2', w=1, j=True) | ||||||
|         if IS_PYMONGO_3: |         self.assertEqual(conn1.write_concern.document, {'w': 1, 'j': True}) | ||||||
|             self.assertEqual(conn1.write_concern.document, {'w': 1, 'j': True}) |         self.assertEqual(conn2.write_concern.document, {'w': 1, 'j': True}) | ||||||
|             self.assertEqual(conn2.write_concern.document, {'w': 1, 'j': True}) |  | ||||||
|         else: |  | ||||||
|             self.assertEqual(dict(conn1.write_concern), {'w': 1, 'j': True}) |  | ||||||
|             self.assertEqual(dict(conn2.write_concern), {'w': 1, 'j': True}) |  | ||||||
|  |  | ||||||
|     def test_connect_with_replicaset_via_uri(self): |     def test_connect_with_replicaset_via_uri(self): | ||||||
|         """Ensure connect() works when specifying a replicaSet via the |         """Ensure connect() works when specifying a replicaSet via the | ||||||
|         MongoDB URI. |         MongoDB URI. | ||||||
|         """ |         """ | ||||||
|         if IS_PYMONGO_3: |         c = connect(host='mongodb://localhost/test?replicaSet=local-rs') | ||||||
|             c = connect(host='mongodb://localhost/test?replicaSet=local-rs') |         db = get_db() | ||||||
|             db = get_db() |         self.assertIsInstance(db, pymongo.database.Database) | ||||||
|             self.assertTrue(isinstance(db, pymongo.database.Database)) |         self.assertEqual(db.name, 'test') | ||||||
|             self.assertEqual(db.name, 'test') |  | ||||||
|         else: |  | ||||||
|             # PyMongo < v3.x raises an exception: |  | ||||||
|             # "localhost:27017 is not a member of replica set local-rs" |  | ||||||
|             with self.assertRaises(MongoEngineConnectionError): |  | ||||||
|                 c = connect(host='mongodb://localhost/test?replicaSet=local-rs') |  | ||||||
|  |  | ||||||
|     def test_connect_with_replicaset_via_kwargs(self): |     def test_connect_with_replicaset_via_kwargs(self): | ||||||
|         """Ensure connect() works when specifying a replicaSet via the |         """Ensure connect() works when specifying a replicaSet via the | ||||||
|         connection kwargs |         connection kwargs | ||||||
|         """ |         """ | ||||||
|         if IS_PYMONGO_3: |         c = connect(replicaset='local-rs') | ||||||
|             c = connect(replicaset='local-rs') |         self.assertEqual(c._MongoClient__options.replica_set_name, | ||||||
|             self.assertEqual(c._MongoClient__options.replica_set_name, |                          'local-rs') | ||||||
|                              'local-rs') |         db = get_db() | ||||||
|             db = get_db() |         self.assertIsInstance(db, pymongo.database.Database) | ||||||
|             self.assertTrue(isinstance(db, pymongo.database.Database)) |         self.assertEqual(db.name, 'test') | ||||||
|             self.assertEqual(db.name, 'test') |  | ||||||
|         else: |  | ||||||
|             # PyMongo < v3.x raises an exception: |  | ||||||
|             # "localhost:27017 is not a member of replica set local-rs" |  | ||||||
|             with self.assertRaises(MongoEngineConnectionError): |  | ||||||
|                 c = connect(replicaset='local-rs') |  | ||||||
|  |  | ||||||
|     def test_datetime(self): |     def test_connect_tz_aware(self): | ||||||
|         connect('mongoenginetest', tz_aware=True) |         connect('mongoenginetest', tz_aware=True) | ||||||
|         d = datetime.datetime(2010, 5, 5, tzinfo=utc) |         d = datetime.datetime(2010, 5, 5, tzinfo=utc) | ||||||
|  |  | ||||||
| @@ -364,6 +589,10 @@ class ConnectionTest(unittest.TestCase): | |||||||
|         date_doc = DateDoc.objects.first() |         date_doc = DateDoc.objects.first() | ||||||
|         self.assertEqual(d, date_doc.the_date) |         self.assertEqual(d, date_doc.the_date) | ||||||
|  |  | ||||||
|  |     def test_read_preference_from_parse(self): | ||||||
|  |         conn = connect(host="mongodb://a1.vpc,a2.vpc,a3.vpc/prod?readPreference=secondaryPreferred") | ||||||
|  |         self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_PREFERRED) | ||||||
|  |  | ||||||
|     def test_multiple_connection_settings(self): |     def test_multiple_connection_settings(self): | ||||||
|         connect('mongoenginetest', alias='t1', host="localhost") |         connect('mongoenginetest', alias='t1', host="localhost") | ||||||
|  |  | ||||||
| @@ -371,19 +600,26 @@ class ConnectionTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         mongo_connections = mongoengine.connection._connections |         mongo_connections = mongoengine.connection._connections | ||||||
|         self.assertEqual(len(mongo_connections.items()), 2) |         self.assertEqual(len(mongo_connections.items()), 2) | ||||||
|         self.assertTrue('t1' in mongo_connections.keys()) |         self.assertIn('t1', mongo_connections.keys()) | ||||||
|         self.assertTrue('t2' in mongo_connections.keys()) |         self.assertIn('t2', mongo_connections.keys()) | ||||||
|         if not IS_PYMONGO_3: |  | ||||||
|             self.assertEqual(mongo_connections['t1'].host, 'localhost') |         # Handle PyMongo 3+ Async Connection | ||||||
|             self.assertEqual(mongo_connections['t2'].host, '127.0.0.1') |         # Ensure we are connected, throws ServerSelectionTimeoutError otherwise. | ||||||
|         else: |         # Purposely not catching exception to fail test if thrown. | ||||||
|             # Handle PyMongo 3+ Async Connection |         mongo_connections['t1'].server_info() | ||||||
|             # Ensure we are connected, throws ServerSelectionTimeoutError otherwise. |         mongo_connections['t2'].server_info() | ||||||
|             # Purposely not catching exception to fail test if thrown. |         self.assertEqual(mongo_connections['t1'].address[0], 'localhost') | ||||||
|             mongo_connections['t1'].server_info() |         self.assertEqual(mongo_connections['t2'].address[0], '127.0.0.1') | ||||||
|             mongo_connections['t2'].server_info() |  | ||||||
|             self.assertEqual(mongo_connections['t1'].address[0], 'localhost') |     def test_connect_2_databases_uses_same_client_if_only_dbname_differs(self): | ||||||
|             self.assertEqual(mongo_connections['t2'].address[0], '127.0.0.1') |         c1 = connect(alias='testdb1', db='testdb1') | ||||||
|  |         c2 = connect(alias='testdb2', db='testdb2') | ||||||
|  |         self.assertIs(c1, c2) | ||||||
|  |  | ||||||
|  |     def test_connect_2_databases_uses_different_client_if_different_parameters(self): | ||||||
|  |         c1 = connect(alias='testdb1', db='testdb1', username='u1') | ||||||
|  |         c2 = connect(alias='testdb2', db='testdb2', username='u2') | ||||||
|  |         self.assertIsNot(c1, c2) | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|   | |||||||
| @@ -5,6 +5,7 @@ from mongoengine.connection import get_db | |||||||
| from mongoengine.context_managers import (switch_db, switch_collection, | from mongoengine.context_managers import (switch_db, switch_collection, | ||||||
|                                           no_sub_classes, no_dereference, |                                           no_sub_classes, no_dereference, | ||||||
|                                           query_counter) |                                           query_counter) | ||||||
|  | from mongoengine.pymongo_support import count_documents | ||||||
|  |  | ||||||
|  |  | ||||||
| class ContextManagersTest(unittest.TestCase): | class ContextManagersTest(unittest.TestCase): | ||||||
| @@ -36,14 +37,15 @@ class ContextManagersTest(unittest.TestCase): | |||||||
|  |  | ||||||
|     def test_switch_collection_context_manager(self): |     def test_switch_collection_context_manager(self): | ||||||
|         connect('mongoenginetest') |         connect('mongoenginetest') | ||||||
|         register_connection('testdb-1', 'mongoenginetest2') |         register_connection(alias='testdb-1', db='mongoenginetest2') | ||||||
|  |  | ||||||
|         class Group(Document): |         class Group(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|  |  | ||||||
|         Group.drop_collection() |         Group.drop_collection()         # drops in default | ||||||
|  |  | ||||||
|         with switch_collection(Group, 'group1') as Group: |         with switch_collection(Group, 'group1') as Group: | ||||||
|             Group.drop_collection() |             Group.drop_collection()     # drops in group1 | ||||||
|  |  | ||||||
|         Group(name="hello - group").save() |         Group(name="hello - group").save() | ||||||
|         self.assertEqual(1, Group.objects.count()) |         self.assertEqual(1, Group.objects.count()) | ||||||
| @@ -89,15 +91,15 @@ class ContextManagersTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         with no_dereference(Group) as Group: |         with no_dereference(Group) as Group: | ||||||
|             group = Group.objects.first() |             group = Group.objects.first() | ||||||
|             self.assertTrue(all([not isinstance(m, User) |             for m in group.members: | ||||||
|                                 for m in group.members])) |                 self.assertNotIsInstance(m, User) | ||||||
|             self.assertFalse(isinstance(group.ref, User)) |             self.assertNotIsInstance(group.ref, User) | ||||||
|             self.assertFalse(isinstance(group.generic, User)) |             self.assertNotIsInstance(group.generic, User) | ||||||
|  |  | ||||||
|         self.assertTrue(all([isinstance(m, User) |         for m in group.members: | ||||||
|                              for m in group.members])) |             self.assertIsInstance(m, User) | ||||||
|         self.assertTrue(isinstance(group.ref, User)) |         self.assertIsInstance(group.ref, User) | ||||||
|         self.assertTrue(isinstance(group.generic, User)) |         self.assertIsInstance(group.generic, User) | ||||||
|  |  | ||||||
|     def test_no_dereference_context_manager_dbref(self): |     def test_no_dereference_context_manager_dbref(self): | ||||||
|         """Ensure that DBRef items in ListFields aren't dereferenced. |         """Ensure that DBRef items in ListFields aren't dereferenced. | ||||||
| @@ -129,19 +131,17 @@ class ContextManagersTest(unittest.TestCase): | |||||||
|             group = Group.objects.first() |             group = Group.objects.first() | ||||||
|             self.assertTrue(all([not isinstance(m, User) |             self.assertTrue(all([not isinstance(m, User) | ||||||
|                                 for m in group.members])) |                                 for m in group.members])) | ||||||
|             self.assertFalse(isinstance(group.ref, User)) |             self.assertNotIsInstance(group.ref, User) | ||||||
|             self.assertFalse(isinstance(group.generic, User)) |             self.assertNotIsInstance(group.generic, User) | ||||||
|  |  | ||||||
|         self.assertTrue(all([isinstance(m, User) |         self.assertTrue(all([isinstance(m, User) | ||||||
|                              for m in group.members])) |                              for m in group.members])) | ||||||
|         self.assertTrue(isinstance(group.ref, User)) |         self.assertIsInstance(group.ref, User) | ||||||
|         self.assertTrue(isinstance(group.generic, User)) |         self.assertIsInstance(group.generic, User) | ||||||
|  |  | ||||||
|     def test_no_sub_classes(self): |     def test_no_sub_classes(self): | ||||||
|         class A(Document): |         class A(Document): | ||||||
|             x = IntField() |             x = IntField() | ||||||
|             y = IntField() |  | ||||||
|  |  | ||||||
|             meta = {'allow_inheritance': True} |             meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|         class B(A): |         class B(A): | ||||||
| @@ -152,29 +152,29 @@ class ContextManagersTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         A.drop_collection() |         A.drop_collection() | ||||||
|  |  | ||||||
|         A(x=10, y=20).save() |         A(x=10).save() | ||||||
|         A(x=15, y=30).save() |         A(x=15).save() | ||||||
|         B(x=20, y=40).save() |         B(x=20).save() | ||||||
|         B(x=30, y=50).save() |         B(x=30).save() | ||||||
|         C(x=40, y=60).save() |         C(x=40).save() | ||||||
|  |  | ||||||
|         self.assertEqual(A.objects.count(), 5) |         self.assertEqual(A.objects.count(), 5) | ||||||
|         self.assertEqual(B.objects.count(), 3) |         self.assertEqual(B.objects.count(), 3) | ||||||
|         self.assertEqual(C.objects.count(), 1) |         self.assertEqual(C.objects.count(), 1) | ||||||
|  |  | ||||||
|         with no_sub_classes(A) as A: |         with no_sub_classes(A): | ||||||
|             self.assertEqual(A.objects.count(), 2) |             self.assertEqual(A.objects.count(), 2) | ||||||
|  |  | ||||||
|             for obj in A.objects: |             for obj in A.objects: | ||||||
|                 self.assertEqual(obj.__class__, A) |                 self.assertEqual(obj.__class__, A) | ||||||
|  |  | ||||||
|         with no_sub_classes(B) as B: |         with no_sub_classes(B): | ||||||
|             self.assertEqual(B.objects.count(), 2) |             self.assertEqual(B.objects.count(), 2) | ||||||
|  |  | ||||||
|             for obj in B.objects: |             for obj in B.objects: | ||||||
|                 self.assertEqual(obj.__class__, B) |                 self.assertEqual(obj.__class__, B) | ||||||
|  |  | ||||||
|         with no_sub_classes(C) as C: |         with no_sub_classes(C): | ||||||
|             self.assertEqual(C.objects.count(), 1) |             self.assertEqual(C.objects.count(), 1) | ||||||
|  |  | ||||||
|             for obj in C.objects: |             for obj in C.objects: | ||||||
| @@ -185,18 +185,133 @@ class ContextManagersTest(unittest.TestCase): | |||||||
|         self.assertEqual(B.objects.count(), 3) |         self.assertEqual(B.objects.count(), 3) | ||||||
|         self.assertEqual(C.objects.count(), 1) |         self.assertEqual(C.objects.count(), 1) | ||||||
|  |  | ||||||
|  |     def test_no_sub_classes_modification_to_document_class_are_temporary(self): | ||||||
|  |         class A(Document): | ||||||
|  |             x = IntField() | ||||||
|  |             meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|  |         class B(A): | ||||||
|  |             z = IntField() | ||||||
|  |  | ||||||
|  |         self.assertEqual(A._subclasses, ('A', 'A.B')) | ||||||
|  |         with no_sub_classes(A): | ||||||
|  |             self.assertEqual(A._subclasses, ('A',)) | ||||||
|  |         self.assertEqual(A._subclasses, ('A', 'A.B')) | ||||||
|  |  | ||||||
|  |         self.assertEqual(B._subclasses, ('A.B',)) | ||||||
|  |         with no_sub_classes(B): | ||||||
|  |             self.assertEqual(B._subclasses, ('A.B',)) | ||||||
|  |         self.assertEqual(B._subclasses, ('A.B',)) | ||||||
|  |  | ||||||
|  |     def test_no_subclass_context_manager_does_not_swallow_exception(self): | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         with self.assertRaises(TypeError): | ||||||
|  |             with no_sub_classes(User): | ||||||
|  |                 raise TypeError() | ||||||
|  |  | ||||||
|  |     def test_query_counter_does_not_swallow_exception(self): | ||||||
|  |  | ||||||
|  |         with self.assertRaises(TypeError): | ||||||
|  |             with query_counter() as q: | ||||||
|  |                 raise TypeError() | ||||||
|  |  | ||||||
|  |     def test_query_counter_temporarily_modifies_profiling_level(self): | ||||||
|  |         connect('mongoenginetest') | ||||||
|  |         db = get_db() | ||||||
|  |  | ||||||
|  |         initial_profiling_level = db.profiling_level() | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             NEW_LEVEL = 1 | ||||||
|  |             db.set_profiling_level(NEW_LEVEL) | ||||||
|  |             self.assertEqual(db.profiling_level(), NEW_LEVEL) | ||||||
|  |             with query_counter() as q: | ||||||
|  |                 self.assertEqual(db.profiling_level(), 2) | ||||||
|  |             self.assertEqual(db.profiling_level(), NEW_LEVEL) | ||||||
|  |         except Exception: | ||||||
|  |             db.set_profiling_level(initial_profiling_level)    # Ensures it gets reseted no matter the outcome of the test | ||||||
|  |             raise | ||||||
|  |  | ||||||
|     def test_query_counter(self): |     def test_query_counter(self): | ||||||
|         connect('mongoenginetest') |         connect('mongoenginetest') | ||||||
|         db = get_db() |         db = get_db() | ||||||
|         db.test.find({}) |  | ||||||
|  |         collection = db.query_counter | ||||||
|  |         collection.drop() | ||||||
|  |  | ||||||
|  |         def issue_1_count_query(): | ||||||
|  |             count_documents(collection, {}) | ||||||
|  |  | ||||||
|  |         def issue_1_insert_query(): | ||||||
|  |             collection.insert_one({'test': 'garbage'}) | ||||||
|  |  | ||||||
|  |         def issue_1_find_query(): | ||||||
|  |             collection.find_one() | ||||||
|  |  | ||||||
|  |         counter = 0 | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, counter) | ||||||
|  |             self.assertEqual(q, counter)    # Ensures previous count query did not get counted | ||||||
|  |  | ||||||
|  |             for _ in range(10): | ||||||
|  |                 issue_1_insert_query() | ||||||
|  |                 counter += 1 | ||||||
|  |             self.assertEqual(q, counter) | ||||||
|  |  | ||||||
|  |             for _ in range(4): | ||||||
|  |                 issue_1_find_query() | ||||||
|  |                 counter += 1 | ||||||
|  |             self.assertEqual(q, counter) | ||||||
|  |  | ||||||
|  |             for _ in range(3): | ||||||
|  |                 issue_1_count_query() | ||||||
|  |                 counter += 1 | ||||||
|  |             self.assertEqual(q, counter) | ||||||
|  |  | ||||||
|  |             self.assertEqual(int(q), counter)       # test __int__ | ||||||
|  |             self.assertEqual(repr(q), str(int(q)))  # test __repr__ | ||||||
|  |             self.assertGreater(q, -1)               # test __gt__ | ||||||
|  |             self.assertGreaterEqual(q, int(q))      # test __gte__ | ||||||
|  |             self.assertNotEqual(q, -1) | ||||||
|  |             self.assertLess(q, 1000) | ||||||
|  |             self.assertLessEqual(q, int(q)) | ||||||
|  |  | ||||||
|  |     def test_query_counter_counts_getmore_queries(self): | ||||||
|  |         connect('mongoenginetest') | ||||||
|  |         db = get_db() | ||||||
|  |  | ||||||
|  |         collection = db.query_counter | ||||||
|  |         collection.drop() | ||||||
|  |  | ||||||
|  |         many_docs = [{'test': 'garbage %s' % i} for i in range(150)] | ||||||
|  |         collection.insert_many(many_docs)   # first batch of documents contains 101 documents | ||||||
|  |  | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
|             self.assertEqual(0, q) |             self.assertEqual(q, 0) | ||||||
|  |             list(collection.find()) | ||||||
|  |             self.assertEqual(q, 2)  # 1st select + 1 getmore | ||||||
|  |  | ||||||
|             for i in range(1, 51): |     def test_query_counter_ignores_particular_queries(self): | ||||||
|                 db.test.find({}).count() |         connect('mongoenginetest') | ||||||
|  |         db = get_db() | ||||||
|  |  | ||||||
|  |         collection = db.query_counter | ||||||
|  |         collection.insert_many([{'test': 'garbage %s' % i} for i in range(10)]) | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |             cursor = collection.find() | ||||||
|  |             self.assertEqual(q, 0)      # cursor wasn't opened yet | ||||||
|  |             _ = next(cursor)            # opens the cursor and fires the find query | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |  | ||||||
|  |             cursor.close()              # issues a `killcursors` query that is ignored by the context | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |             _ = db.system.indexes.find_one()    # queries on db.system.indexes are ignored as well | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |  | ||||||
|             self.assertEqual(50, q) |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -1,6 +1,361 @@ | |||||||
| import unittest | import unittest | ||||||
|  | from six import iterkeys | ||||||
|  |  | ||||||
| from mongoengine.base.datastructures import StrictDict | from mongoengine import Document | ||||||
|  | from mongoengine.base.datastructures import StrictDict, BaseList, BaseDict | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DocumentStub(object): | ||||||
|  |     def __init__(self): | ||||||
|  |         self._changed_fields = [] | ||||||
|  |  | ||||||
|  |     def _mark_as_changed(self, key): | ||||||
|  |         self._changed_fields.append(key) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestBaseDict(unittest.TestCase): | ||||||
|  |  | ||||||
|  |     @staticmethod | ||||||
|  |     def _get_basedict(dict_items): | ||||||
|  |         """Get a BaseList bound to a fake document instance""" | ||||||
|  |         fake_doc = DocumentStub() | ||||||
|  |         base_list = BaseDict(dict_items, instance=None, name='my_name') | ||||||
|  |         base_list._instance = fake_doc  # hack to inject the mock, it does not work in the constructor | ||||||
|  |         return base_list | ||||||
|  |  | ||||||
|  |     def test___init___(self): | ||||||
|  |         class MyDoc(Document): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         dict_items = {'k': 'v'} | ||||||
|  |         doc = MyDoc() | ||||||
|  |         base_dict = BaseDict(dict_items, instance=doc, name='my_name') | ||||||
|  |         self.assertIsInstance(base_dict._instance, Document) | ||||||
|  |         self.assertEqual(base_dict._name, 'my_name') | ||||||
|  |         self.assertEqual(base_dict, dict_items) | ||||||
|  |  | ||||||
|  |     def test_setdefault_calls_mark_as_changed(self): | ||||||
|  |         base_dict = self._get_basedict({}) | ||||||
|  |         base_dict.setdefault('k', 'v') | ||||||
|  |         self.assertEqual(base_dict._instance._changed_fields, [base_dict._name]) | ||||||
|  |  | ||||||
|  |     def test_popitems_calls_mark_as_changed(self): | ||||||
|  |         base_dict = self._get_basedict({'k': 'v'}) | ||||||
|  |         self.assertEqual(base_dict.popitem(), ('k', 'v')) | ||||||
|  |         self.assertEqual(base_dict._instance._changed_fields, [base_dict._name]) | ||||||
|  |         self.assertFalse(base_dict) | ||||||
|  |  | ||||||
|  |     def test_pop_calls_mark_as_changed(self): | ||||||
|  |         base_dict = self._get_basedict({'k': 'v'}) | ||||||
|  |         self.assertEqual(base_dict.pop('k'), 'v') | ||||||
|  |         self.assertEqual(base_dict._instance._changed_fields, [base_dict._name]) | ||||||
|  |         self.assertFalse(base_dict) | ||||||
|  |  | ||||||
|  |     def test_pop_calls_does_not_mark_as_changed_when_it_fails(self): | ||||||
|  |         base_dict = self._get_basedict({'k': 'v'}) | ||||||
|  |         with self.assertRaises(KeyError): | ||||||
|  |             base_dict.pop('X') | ||||||
|  |         self.assertFalse(base_dict._instance._changed_fields) | ||||||
|  |  | ||||||
|  |     def test_clear_calls_mark_as_changed(self): | ||||||
|  |         base_dict = self._get_basedict({'k': 'v'}) | ||||||
|  |         base_dict.clear() | ||||||
|  |         self.assertEqual(base_dict._instance._changed_fields, ['my_name']) | ||||||
|  |         self.assertEqual(base_dict, {}) | ||||||
|  |  | ||||||
|  |     def test___delitem___calls_mark_as_changed(self): | ||||||
|  |         base_dict = self._get_basedict({'k': 'v'}) | ||||||
|  |         del base_dict['k'] | ||||||
|  |         self.assertEqual(base_dict._instance._changed_fields, ['my_name.k']) | ||||||
|  |         self.assertEqual(base_dict, {}) | ||||||
|  |  | ||||||
|  |     def test___getitem____KeyError(self): | ||||||
|  |         base_dict = self._get_basedict({}) | ||||||
|  |         with self.assertRaises(KeyError): | ||||||
|  |             base_dict['new'] | ||||||
|  |  | ||||||
|  |     def test___getitem____simple_value(self): | ||||||
|  |         base_dict = self._get_basedict({'k': 'v'}) | ||||||
|  |         base_dict['k'] = 'v' | ||||||
|  |  | ||||||
|  |     def test___getitem____sublist_gets_converted_to_BaseList(self): | ||||||
|  |         base_dict = self._get_basedict({'k': [0, 1, 2]}) | ||||||
|  |         sub_list = base_dict['k'] | ||||||
|  |         self.assertEqual(sub_list, [0, 1, 2]) | ||||||
|  |         self.assertIsInstance(sub_list, BaseList) | ||||||
|  |         self.assertIs(sub_list._instance, base_dict._instance) | ||||||
|  |         self.assertEqual(sub_list._name, 'my_name.k') | ||||||
|  |         self.assertEqual(base_dict._instance._changed_fields, []) | ||||||
|  |  | ||||||
|  |         # Challenge mark_as_changed from sublist | ||||||
|  |         sub_list[1] = None | ||||||
|  |         self.assertEqual(base_dict._instance._changed_fields, ['my_name.k.1']) | ||||||
|  |  | ||||||
|  |     def test___getitem____subdict_gets_converted_to_BaseDict(self): | ||||||
|  |         base_dict = self._get_basedict({'k': {'subk': 'subv'}}) | ||||||
|  |         sub_dict = base_dict['k'] | ||||||
|  |         self.assertEqual(sub_dict, {'subk': 'subv'}) | ||||||
|  |         self.assertIsInstance(sub_dict, BaseDict) | ||||||
|  |         self.assertIs(sub_dict._instance, base_dict._instance) | ||||||
|  |         self.assertEqual(sub_dict._name, 'my_name.k') | ||||||
|  |         self.assertEqual(base_dict._instance._changed_fields, []) | ||||||
|  |  | ||||||
|  |         # Challenge mark_as_changed from subdict | ||||||
|  |         sub_dict['subk'] = None | ||||||
|  |         self.assertEqual(base_dict._instance._changed_fields, ['my_name.k.subk']) | ||||||
|  |  | ||||||
|  |     def test_get_sublist_gets_converted_to_BaseList_just_like__getitem__(self): | ||||||
|  |         base_dict = self._get_basedict({'k': [0, 1, 2]}) | ||||||
|  |         sub_list = base_dict.get('k') | ||||||
|  |         self.assertEqual(sub_list, [0, 1, 2]) | ||||||
|  |         self.assertIsInstance(sub_list, BaseList) | ||||||
|  |  | ||||||
|  |     def test_get_returns_the_same_as___getitem__(self): | ||||||
|  |         base_dict = self._get_basedict({'k': [0, 1, 2]}) | ||||||
|  |         get_ = base_dict.get('k') | ||||||
|  |         getitem_ = base_dict['k'] | ||||||
|  |         self.assertEqual(get_, getitem_) | ||||||
|  |  | ||||||
|  |     def test_get_default(self): | ||||||
|  |         base_dict = self._get_basedict({}) | ||||||
|  |         sentinel = object() | ||||||
|  |         self.assertEqual(base_dict.get('new'), None) | ||||||
|  |         self.assertIs(base_dict.get('new', sentinel), sentinel) | ||||||
|  |  | ||||||
|  |     def test___setitem___calls_mark_as_changed(self): | ||||||
|  |         base_dict = self._get_basedict({}) | ||||||
|  |         base_dict['k'] = 'v' | ||||||
|  |         self.assertEqual(base_dict._instance._changed_fields, ['my_name.k']) | ||||||
|  |         self.assertEqual(base_dict, {'k': 'v'}) | ||||||
|  |  | ||||||
|  |     def test_update_calls_mark_as_changed(self): | ||||||
|  |         base_dict = self._get_basedict({}) | ||||||
|  |         base_dict.update({'k': 'v'}) | ||||||
|  |         self.assertEqual(base_dict._instance._changed_fields, ['my_name']) | ||||||
|  |  | ||||||
|  |     def test___setattr____not_tracked_by_changes(self): | ||||||
|  |         base_dict = self._get_basedict({}) | ||||||
|  |         base_dict.a_new_attr = 'test' | ||||||
|  |         self.assertEqual(base_dict._instance._changed_fields, []) | ||||||
|  |  | ||||||
|  |     def test___delattr____tracked_by_changes(self): | ||||||
|  |         # This is probably a bug as __setattr__ is not tracked | ||||||
|  |         # This is even bad because it could be that there is an attribute | ||||||
|  |         # with the same name as a key | ||||||
|  |         base_dict = self._get_basedict({}) | ||||||
|  |         base_dict.a_new_attr = 'test' | ||||||
|  |         del base_dict.a_new_attr | ||||||
|  |         self.assertEqual(base_dict._instance._changed_fields, ['my_name.a_new_attr']) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestBaseList(unittest.TestCase): | ||||||
|  |  | ||||||
|  |     @staticmethod | ||||||
|  |     def _get_baselist(list_items): | ||||||
|  |         """Get a BaseList bound to a fake document instance""" | ||||||
|  |         fake_doc = DocumentStub() | ||||||
|  |         base_list = BaseList(list_items, instance=None, name='my_name') | ||||||
|  |         base_list._instance = fake_doc  # hack to inject the mock, it does not work in the constructor | ||||||
|  |         return base_list | ||||||
|  |  | ||||||
|  |     def test___init___(self): | ||||||
|  |         class MyDoc(Document): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         list_items = [True] | ||||||
|  |         doc = MyDoc() | ||||||
|  |         base_list = BaseList(list_items, instance=doc, name='my_name') | ||||||
|  |         self.assertIsInstance(base_list._instance, Document) | ||||||
|  |         self.assertEqual(base_list._name, 'my_name') | ||||||
|  |         self.assertEqual(base_list, list_items) | ||||||
|  |  | ||||||
|  |     def test___iter__(self): | ||||||
|  |         values = [True, False, True, False] | ||||||
|  |         base_list = BaseList(values, instance=None, name='my_name') | ||||||
|  |         self.assertEqual(values, list(base_list)) | ||||||
|  |  | ||||||
|  |     def test___iter___allow_modification_while_iterating_withou_error(self): | ||||||
|  |         # regular list allows for this, thus this subclass must comply to that | ||||||
|  |         base_list = BaseList([True, False, True, False], instance=None, name='my_name') | ||||||
|  |         for idx, val in enumerate(base_list): | ||||||
|  |             if val: | ||||||
|  |                 base_list.pop(idx) | ||||||
|  |  | ||||||
|  |     def test_append_calls_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([]) | ||||||
|  |         self.assertFalse(base_list._instance._changed_fields) | ||||||
|  |         base_list.append(True) | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |  | ||||||
|  |     def test_subclass_append(self): | ||||||
|  |         # Due to the way mark_as_changed_wrapper is implemented | ||||||
|  |         # it is good to test subclasses | ||||||
|  |         class SubBaseList(BaseList): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         base_list = SubBaseList([], instance=None, name='my_name') | ||||||
|  |         base_list.append(True) | ||||||
|  |  | ||||||
|  |     def test___getitem__using_simple_index(self): | ||||||
|  |         base_list = self._get_baselist([0, 1, 2]) | ||||||
|  |         self.assertEqual(base_list[0], 0) | ||||||
|  |         self.assertEqual(base_list[1], 1) | ||||||
|  |         self.assertEqual(base_list[-1], 2) | ||||||
|  |  | ||||||
|  |     def test___getitem__using_slice(self): | ||||||
|  |         base_list = self._get_baselist([0, 1, 2]) | ||||||
|  |         self.assertEqual(base_list[1:3], [1, 2]) | ||||||
|  |         self.assertEqual(base_list[0:3:2], [0, 2]) | ||||||
|  |  | ||||||
|  |     def test___getitem___using_slice_returns_list(self): | ||||||
|  |         # Bug: using slice does not properly handles the instance | ||||||
|  |         # and mark_as_changed behaviour. | ||||||
|  |         base_list = self._get_baselist([0, 1, 2]) | ||||||
|  |         sliced = base_list[1:3] | ||||||
|  |         self.assertEqual(sliced, [1, 2]) | ||||||
|  |         self.assertIsInstance(sliced, list) | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, []) | ||||||
|  |  | ||||||
|  |     def test___getitem__sublist_returns_BaseList_bound_to_instance(self): | ||||||
|  |         base_list = self._get_baselist( | ||||||
|  |             [ | ||||||
|  |                 [1, 2], | ||||||
|  |                 [3, 4] | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |         sub_list = base_list[0] | ||||||
|  |         self.assertEqual(sub_list, [1, 2]) | ||||||
|  |         self.assertIsInstance(sub_list, BaseList) | ||||||
|  |         self.assertIs(sub_list._instance, base_list._instance) | ||||||
|  |         self.assertEqual(sub_list._name, 'my_name.0') | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, []) | ||||||
|  |  | ||||||
|  |         # Challenge mark_as_changed from sublist | ||||||
|  |         sub_list[1] = None | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name.0.1']) | ||||||
|  |  | ||||||
|  |     def test___getitem__subdict_returns_BaseList_bound_to_instance(self): | ||||||
|  |         base_list = self._get_baselist( | ||||||
|  |             [ | ||||||
|  |                 {'subk': 'subv'} | ||||||
|  |             ] | ||||||
|  |         ) | ||||||
|  |         sub_dict = base_list[0] | ||||||
|  |         self.assertEqual(sub_dict, {'subk': 'subv'}) | ||||||
|  |         self.assertIsInstance(sub_dict, BaseDict) | ||||||
|  |         self.assertIs(sub_dict._instance, base_list._instance) | ||||||
|  |         self.assertEqual(sub_dict._name, 'my_name.0') | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, []) | ||||||
|  |  | ||||||
|  |         # Challenge mark_as_changed from subdict | ||||||
|  |         sub_dict['subk'] = None | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name.0.subk']) | ||||||
|  |  | ||||||
|  |     def test_extend_calls_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([]) | ||||||
|  |         base_list.extend([True]) | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |  | ||||||
|  |     def test_insert_calls_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([]) | ||||||
|  |         base_list.insert(0, True) | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |  | ||||||
|  |     def test_remove_calls_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([True]) | ||||||
|  |         base_list.remove(True) | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |  | ||||||
|  |     def test_remove_not_mark_as_changed_when_it_fails(self): | ||||||
|  |         base_list = self._get_baselist([True]) | ||||||
|  |         with self.assertRaises(ValueError): | ||||||
|  |             base_list.remove(False) | ||||||
|  |         self.assertFalse(base_list._instance._changed_fields) | ||||||
|  |  | ||||||
|  |     def test_pop_calls_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([True]) | ||||||
|  |         base_list.pop() | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |  | ||||||
|  |     def test_reverse_calls_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([True, False]) | ||||||
|  |         base_list.reverse() | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |  | ||||||
|  |     def test___delitem___calls_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([True]) | ||||||
|  |         del base_list[0] | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |  | ||||||
|  |     def test___setitem___calls_with_full_slice_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([]) | ||||||
|  |         base_list[:] = [0, 1]      # Will use __setslice__ under py2 and __setitem__ under py3 | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |         self.assertEqual(base_list, [0, 1]) | ||||||
|  |  | ||||||
|  |     def test___setitem___calls_with_partial_slice_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([0, 1, 2]) | ||||||
|  |         base_list[0:2] = [1, 0]     # Will use __setslice__ under py2 and __setitem__ under py3 | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |         self.assertEqual(base_list, [1, 0, 2]) | ||||||
|  |  | ||||||
|  |     def test___setitem___calls_with_step_slice_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([0, 1, 2]) | ||||||
|  |         base_list[0:3:2] = [-1, -2]   # uses __setitem__ in both py2 & 3 | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |         self.assertEqual(base_list, [-1, 1, -2]) | ||||||
|  |  | ||||||
|  |     def test___setitem___with_slice(self): | ||||||
|  |         base_list = self._get_baselist([0, 1, 2, 3, 4, 5]) | ||||||
|  |         base_list[0:6:2] = [None, None, None] | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |         self.assertEqual(base_list, [None, 1, None, 3, None, 5]) | ||||||
|  |  | ||||||
|  |     def test___setitem___item_0_calls_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([True]) | ||||||
|  |         base_list[0] = False | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |         self.assertEqual(base_list, [False]) | ||||||
|  |  | ||||||
|  |     def test___setitem___item_1_calls_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([True, True]) | ||||||
|  |         base_list[1] = False | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name.1']) | ||||||
|  |         self.assertEqual(base_list, [True, False]) | ||||||
|  |  | ||||||
|  |     def test___delslice___calls_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([0, 1]) | ||||||
|  |         del base_list[0:1] | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |         self.assertEqual(base_list, [1]) | ||||||
|  |  | ||||||
|  |     def test___iadd___calls_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([True]) | ||||||
|  |         base_list += [False] | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |  | ||||||
|  |     def test___imul___calls_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([True]) | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, []) | ||||||
|  |         base_list *= 2 | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |  | ||||||
|  |     def test_sort_calls_not_marked_as_changed_when_it_fails(self): | ||||||
|  |         base_list = self._get_baselist([True]) | ||||||
|  |         with self.assertRaises(TypeError): | ||||||
|  |             base_list.sort(key=1) | ||||||
|  |  | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, []) | ||||||
|  |  | ||||||
|  |     def test_sort_calls_mark_as_changed(self): | ||||||
|  |         base_list = self._get_baselist([True, False]) | ||||||
|  |         base_list.sort() | ||||||
|  |         self.assertEqual(base_list._instance._changed_fields, ['my_name']) | ||||||
|  |  | ||||||
|  |     def test_sort_calls_with_key(self): | ||||||
|  |         base_list = self._get_baselist([1, 2, 11]) | ||||||
|  |         base_list.sort(key=lambda i: str(i)) | ||||||
|  |         self.assertEqual(base_list, [1, 11, 2]) | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestStrictDict(unittest.TestCase): | class TestStrictDict(unittest.TestCase): | ||||||
| @@ -14,6 +369,20 @@ class TestStrictDict(unittest.TestCase): | |||||||
|         d = self.dtype(a=1, b=1, c=1) |         d = self.dtype(a=1, b=1, c=1) | ||||||
|         self.assertEqual((d.a, d.b, d.c), (1, 1, 1)) |         self.assertEqual((d.a, d.b, d.c), (1, 1, 1)) | ||||||
|  |  | ||||||
|  |     def test_iterkeys(self): | ||||||
|  |         d = self.dtype(a=1) | ||||||
|  |         self.assertEqual(list(iterkeys(d)), ['a']) | ||||||
|  |  | ||||||
|  |     def test_len(self): | ||||||
|  |         d = self.dtype(a=1) | ||||||
|  |         self.assertEqual(len(d), 1) | ||||||
|  |  | ||||||
|  |     def test_pop(self): | ||||||
|  |         d = self.dtype(a=1) | ||||||
|  |         self.assertIn('a', d) | ||||||
|  |         d.pop('a') | ||||||
|  |         self.assertNotIn('a', d) | ||||||
|  |  | ||||||
|     def test_repr(self): |     def test_repr(self): | ||||||
|         d = self.dtype(a=1, b=2, c=3) |         d = self.dtype(a=1, b=2, c=3) | ||||||
|         self.assertEqual(repr(d), '{"a": 1, "b": 2, "c": 3}') |         self.assertEqual(repr(d), '{"a": 1, "b": 2, "c": 3}') | ||||||
| @@ -72,8 +441,8 @@ class TestStrictDict(unittest.TestCase): | |||||||
|  |  | ||||||
|     def test_mappings_protocol(self): |     def test_mappings_protocol(self): | ||||||
|         d = self.dtype(a=1, b=2) |         d = self.dtype(a=1, b=2) | ||||||
|         assert dict(d) == {'a': 1, 'b': 2} |         self.assertEqual(dict(d), {'a': 1, 'b': 2}) | ||||||
|         assert dict(**d) == {'a': 1, 'b': 2} |         self.assertEqual(dict(**d), {'a': 1, 'b': 2}) | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|   | |||||||
| @@ -2,6 +2,7 @@ | |||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| from bson import DBRef, ObjectId | from bson import DBRef, ObjectId | ||||||
|  | from six import iteritems | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.connection import get_db | from mongoengine.connection import get_db | ||||||
| @@ -104,6 +105,14 @@ class FieldTest(unittest.TestCase): | |||||||
|  |  | ||||||
|             [m for m in group_obj.members] |             [m for m in group_obj.members] | ||||||
|             self.assertEqual(q, 2) |             self.assertEqual(q, 2) | ||||||
|  |             self.assertTrue(group_obj._data['members']._dereferenced) | ||||||
|  |  | ||||||
|  |             # verifies that no additional queries gets executed | ||||||
|  |             # if we re-iterate over the ListField once it is | ||||||
|  |             # dereferenced | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |             self.assertTrue(group_obj._data['members']._dereferenced) | ||||||
|  |  | ||||||
|         # Document select_related |         # Document select_related | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -124,6 +133,46 @@ class FieldTest(unittest.TestCase): | |||||||
|                 [m for m in group_obj.members] |                 [m for m in group_obj.members] | ||||||
|                 self.assertEqual(q, 2) |                 self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |     def test_list_item_dereference_orphan_dbref(self): | ||||||
|  |         """Ensure that orphan DBRef items in ListFields are dereferenced. | ||||||
|  |         """ | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             members = ListField(ReferenceField(User, dbref=False)) | ||||||
|  |  | ||||||
|  |         User.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |         for i in range(1, 51): | ||||||
|  |             user = User(name='user %s' % i) | ||||||
|  |             user.save() | ||||||
|  |  | ||||||
|  |         group = Group(members=User.objects) | ||||||
|  |         group.save() | ||||||
|  |         group.reload()  # Confirm reload works | ||||||
|  |  | ||||||
|  |         # Delete one User so one of the references in the | ||||||
|  |         # Group.members list is an orphan DBRef | ||||||
|  |         User.objects[0].delete() | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first() | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |             self.assertTrue(group_obj._data['members']._dereferenced) | ||||||
|  |  | ||||||
|  |             # verifies that no additional queries gets executed | ||||||
|  |             # if we re-iterate over the ListField once it is | ||||||
|  |             # dereferenced | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |             self.assertTrue(group_obj._data['members']._dereferenced) | ||||||
|  |  | ||||||
|         User.drop_collection() |         User.drop_collection() | ||||||
|         Group.drop_collection() |         Group.drop_collection() | ||||||
|  |  | ||||||
| @@ -200,8 +249,8 @@ class FieldTest(unittest.TestCase): | |||||||
|         group = Group(author=user, members=[user]).save() |         group = Group(author=user, members=[user]).save() | ||||||
|  |  | ||||||
|         raw_data = Group._get_collection().find_one() |         raw_data = Group._get_collection().find_one() | ||||||
|         self.assertTrue(isinstance(raw_data['author'], DBRef)) |         self.assertIsInstance(raw_data['author'], DBRef) | ||||||
|         self.assertTrue(isinstance(raw_data['members'][0], DBRef)) |         self.assertIsInstance(raw_data['members'][0], DBRef) | ||||||
|         group = Group.objects.first() |         group = Group.objects.first() | ||||||
|  |  | ||||||
|         self.assertEqual(group.author, user) |         self.assertEqual(group.author, user) | ||||||
| @@ -224,8 +273,8 @@ class FieldTest(unittest.TestCase): | |||||||
|         self.assertEqual(group.members, [user]) |         self.assertEqual(group.members, [user]) | ||||||
|  |  | ||||||
|         raw_data = Group._get_collection().find_one() |         raw_data = Group._get_collection().find_one() | ||||||
|         self.assertTrue(isinstance(raw_data['author'], ObjectId)) |         self.assertIsInstance(raw_data['author'], ObjectId) | ||||||
|         self.assertTrue(isinstance(raw_data['members'][0], ObjectId)) |         self.assertIsInstance(raw_data['members'][0], ObjectId) | ||||||
|  |  | ||||||
|     def test_recursive_reference(self): |     def test_recursive_reference(self): | ||||||
|         """Ensure that ReferenceFields can reference their own documents. |         """Ensure that ReferenceFields can reference their own documents. | ||||||
| @@ -469,7 +518,7 @@ class FieldTest(unittest.TestCase): | |||||||
|             self.assertEqual(q, 4) |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|             for m in group_obj.members: |             for m in group_obj.members: | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |                 self.assertIn('User', m.__class__.__name__) | ||||||
|  |  | ||||||
|         # Document select_related |         # Document select_related | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -485,7 +534,7 @@ class FieldTest(unittest.TestCase): | |||||||
|             self.assertEqual(q, 4) |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|             for m in group_obj.members: |             for m in group_obj.members: | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |                 self.assertIn('User', m.__class__.__name__) | ||||||
|  |  | ||||||
|         # Queryset select_related |         # Queryset select_related | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -502,7 +551,62 @@ class FieldTest(unittest.TestCase): | |||||||
|                 self.assertEqual(q, 4) |                 self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|                 for m in group_obj.members: |                 for m in group_obj.members: | ||||||
|                     self.assertTrue('User' in m.__class__.__name__) |                     self.assertIn('User', m.__class__.__name__) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     def test_generic_reference_orphan_dbref(self): | ||||||
|  |         """Ensure that generic orphan DBRef items in ListFields are dereferenced. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class UserA(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class UserB(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class UserC(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             members = ListField(GenericReferenceField()) | ||||||
|  |  | ||||||
|  |         UserA.drop_collection() | ||||||
|  |         UserB.drop_collection() | ||||||
|  |         UserC.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |         members = [] | ||||||
|  |         for i in range(1, 51): | ||||||
|  |             a = UserA(name='User A %s' % i) | ||||||
|  |             a.save() | ||||||
|  |  | ||||||
|  |             b = UserB(name='User B %s' % i) | ||||||
|  |             b.save() | ||||||
|  |  | ||||||
|  |             c = UserC(name='User C %s' % i) | ||||||
|  |             c.save() | ||||||
|  |  | ||||||
|  |             members += [a, b, c] | ||||||
|  |  | ||||||
|  |         group = Group(members=members) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         # Delete one UserA instance so that there is | ||||||
|  |         # an orphan DBRef in the GenericReference ListField | ||||||
|  |         UserA.objects[0].delete() | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first() | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |             self.assertTrue(group_obj._data['members']._dereferenced) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 4) | ||||||
|  |             self.assertTrue(group_obj._data['members']._dereferenced) | ||||||
|  |  | ||||||
|         UserA.drop_collection() |         UserA.drop_collection() | ||||||
|         UserB.drop_collection() |         UserB.drop_collection() | ||||||
| @@ -560,7 +664,7 @@ class FieldTest(unittest.TestCase): | |||||||
|             self.assertEqual(q, 4) |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|             for m in group_obj.members: |             for m in group_obj.members: | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |                 self.assertIn('User', m.__class__.__name__) | ||||||
|  |  | ||||||
|         # Document select_related |         # Document select_related | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -576,7 +680,7 @@ class FieldTest(unittest.TestCase): | |||||||
|             self.assertEqual(q, 4) |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|             for m in group_obj.members: |             for m in group_obj.members: | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |                 self.assertIn('User', m.__class__.__name__) | ||||||
|  |  | ||||||
|         # Queryset select_related |         # Queryset select_related | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -593,7 +697,7 @@ class FieldTest(unittest.TestCase): | |||||||
|                 self.assertEqual(q, 4) |                 self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|                 for m in group_obj.members: |                 for m in group_obj.members: | ||||||
|                     self.assertTrue('User' in m.__class__.__name__) |                     self.assertIn('User', m.__class__.__name__) | ||||||
|  |  | ||||||
|         UserA.drop_collection() |         UserA.drop_collection() | ||||||
|         UserB.drop_collection() |         UserB.drop_collection() | ||||||
| @@ -632,8 +736,8 @@ class FieldTest(unittest.TestCase): | |||||||
|             [m for m in group_obj.members] |             [m for m in group_obj.members] | ||||||
|             self.assertEqual(q, 2) |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |             for k, m in iteritems(group_obj.members): | ||||||
|                 self.assertTrue(isinstance(m, User)) |                 self.assertIsInstance(m, User) | ||||||
|  |  | ||||||
|         # Document select_related |         # Document select_related | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -645,8 +749,8 @@ class FieldTest(unittest.TestCase): | |||||||
|             [m for m in group_obj.members] |             [m for m in group_obj.members] | ||||||
|             self.assertEqual(q, 2) |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |             for k, m in iteritems(group_obj.members): | ||||||
|                 self.assertTrue(isinstance(m, User)) |                 self.assertIsInstance(m, User) | ||||||
|  |  | ||||||
|        # Queryset select_related |        # Queryset select_related | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -659,8 +763,8 @@ class FieldTest(unittest.TestCase): | |||||||
|                 [m for m in group_obj.members] |                 [m for m in group_obj.members] | ||||||
|                 self.assertEqual(q, 2) |                 self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|                 for k, m in group_obj.members.iteritems(): |                 for k, m in iteritems(group_obj.members): | ||||||
|                     self.assertTrue(isinstance(m, User)) |                     self.assertIsInstance(m, User) | ||||||
|  |  | ||||||
|         User.drop_collection() |         User.drop_collection() | ||||||
|         Group.drop_collection() |         Group.drop_collection() | ||||||
| @@ -714,8 +818,8 @@ class FieldTest(unittest.TestCase): | |||||||
|             [m for m in group_obj.members] |             [m for m in group_obj.members] | ||||||
|             self.assertEqual(q, 4) |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |             for k, m in iteritems(group_obj.members): | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |                 self.assertIn('User', m.__class__.__name__) | ||||||
|  |  | ||||||
|         # Document select_related |         # Document select_related | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -730,8 +834,8 @@ class FieldTest(unittest.TestCase): | |||||||
|             [m for m in group_obj.members] |             [m for m in group_obj.members] | ||||||
|             self.assertEqual(q, 4) |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |             for k, m in iteritems(group_obj.members): | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |                 self.assertIn('User', m.__class__.__name__) | ||||||
|  |  | ||||||
|         # Queryset select_related |         # Queryset select_related | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -747,8 +851,8 @@ class FieldTest(unittest.TestCase): | |||||||
|                 [m for m in group_obj.members] |                 [m for m in group_obj.members] | ||||||
|                 self.assertEqual(q, 4) |                 self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|                 for k, m in group_obj.members.iteritems(): |                 for k, m in iteritems(group_obj.members): | ||||||
|                     self.assertTrue('User' in m.__class__.__name__) |                     self.assertIn('User', m.__class__.__name__) | ||||||
|  |  | ||||||
|         Group.objects.delete() |         Group.objects.delete() | ||||||
|         Group().save() |         Group().save() | ||||||
| @@ -805,8 +909,8 @@ class FieldTest(unittest.TestCase): | |||||||
|             [m for m in group_obj.members] |             [m for m in group_obj.members] | ||||||
|             self.assertEqual(q, 2) |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |             for k, m in iteritems(group_obj.members): | ||||||
|                 self.assertTrue(isinstance(m, UserA)) |                 self.assertIsInstance(m, UserA) | ||||||
|  |  | ||||||
|         # Document select_related |         # Document select_related | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -821,8 +925,8 @@ class FieldTest(unittest.TestCase): | |||||||
|             [m for m in group_obj.members] |             [m for m in group_obj.members] | ||||||
|             self.assertEqual(q, 2) |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |             for k, m in iteritems(group_obj.members): | ||||||
|                 self.assertTrue(isinstance(m, UserA)) |                 self.assertIsInstance(m, UserA) | ||||||
|  |  | ||||||
|         # Queryset select_related |         # Queryset select_related | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -838,8 +942,8 @@ class FieldTest(unittest.TestCase): | |||||||
|                 [m for m in group_obj.members] |                 [m for m in group_obj.members] | ||||||
|                 self.assertEqual(q, 2) |                 self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|                 for k, m in group_obj.members.iteritems(): |                 for k, m in iteritems(group_obj.members): | ||||||
|                     self.assertTrue(isinstance(m, UserA)) |                     self.assertIsInstance(m, UserA) | ||||||
|  |  | ||||||
|         UserA.drop_collection() |         UserA.drop_collection() | ||||||
|         Group.drop_collection() |         Group.drop_collection() | ||||||
| @@ -893,8 +997,8 @@ class FieldTest(unittest.TestCase): | |||||||
|             [m for m in group_obj.members] |             [m for m in group_obj.members] | ||||||
|             self.assertEqual(q, 4) |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |             for k, m in iteritems(group_obj.members): | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |                 self.assertIn('User', m.__class__.__name__) | ||||||
|  |  | ||||||
|         # Document select_related |         # Document select_related | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -909,8 +1013,8 @@ class FieldTest(unittest.TestCase): | |||||||
|             [m for m in group_obj.members] |             [m for m in group_obj.members] | ||||||
|             self.assertEqual(q, 4) |             self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |             for k, m in iteritems(group_obj.members): | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |                 self.assertIn('User', m.__class__.__name__) | ||||||
|  |  | ||||||
|         # Queryset select_related |         # Queryset select_related | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -926,8 +1030,8 @@ class FieldTest(unittest.TestCase): | |||||||
|                 [m for m in group_obj.members] |                 [m for m in group_obj.members] | ||||||
|                 self.assertEqual(q, 4) |                 self.assertEqual(q, 4) | ||||||
|  |  | ||||||
|                 for k, m in group_obj.members.iteritems(): |                 for k, m in iteritems(group_obj.members): | ||||||
|                     self.assertTrue('User' in m.__class__.__name__) |                     self.assertIn('User', m.__class__.__name__) | ||||||
|  |  | ||||||
|         Group.objects.delete() |         Group.objects.delete() | ||||||
|         Group().save() |         Group().save() | ||||||
| @@ -1029,7 +1133,6 @@ class FieldTest(unittest.TestCase): | |||||||
|         self.assertEqual(type(foo.bar), Bar) |         self.assertEqual(type(foo.bar), Bar) | ||||||
|         self.assertEqual(type(foo.baz), Baz) |         self.assertEqual(type(foo.baz), Baz) | ||||||
|  |  | ||||||
|  |  | ||||||
|     def test_document_reload_reference_integrity(self): |     def test_document_reload_reference_integrity(self): | ||||||
|         """ |         """ | ||||||
|         Ensure reloading a document with multiple similar id |         Ensure reloading a document with multiple similar id | ||||||
| @@ -1065,7 +1168,6 @@ class FieldTest(unittest.TestCase): | |||||||
|         self.assertEqual(msg.author, user) |         self.assertEqual(msg.author, user) | ||||||
|         self.assertEqual(msg.author.name, 'new-name') |         self.assertEqual(msg.author.name, 'new-name') | ||||||
|  |  | ||||||
|  |  | ||||||
|     def test_list_lookup_not_checked_in_map(self): |     def test_list_lookup_not_checked_in_map(self): | ||||||
|         """Ensure we dereference list data correctly |         """Ensure we dereference list data correctly | ||||||
|         """ |         """ | ||||||
| @@ -1209,10 +1311,10 @@ class FieldTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         # Can't use query_counter across databases - so test the _data object |         # Can't use query_counter across databases - so test the _data object | ||||||
|         book = Book.objects.first() |         book = Book.objects.first() | ||||||
|         self.assertFalse(isinstance(book._data['author'], User)) |         self.assertNotIsInstance(book._data['author'], User) | ||||||
|  |  | ||||||
|         book.select_related() |         book.select_related() | ||||||
|         self.assertTrue(isinstance(book._data['author'], User)) |         self.assertIsInstance(book._data['author'], User) | ||||||
|  |  | ||||||
|     def test_non_ascii_pk(self): |     def test_non_ascii_pk(self): | ||||||
|         """ |         """ | ||||||
| @@ -1287,5 +1389,6 @@ class FieldTest(unittest.TestCase): | |||||||
|  |  | ||||||
|             self.assertEqual(q, 2) |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -1,23 +1,16 @@ | |||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| from pymongo import ReadPreference | from pymongo import ReadPreference | ||||||
|  | from pymongo import MongoClient | ||||||
| from mongoengine.python_support import IS_PYMONGO_3 |  | ||||||
|  |  | ||||||
| if IS_PYMONGO_3: |  | ||||||
|     from pymongo import MongoClient |  | ||||||
|     CONN_CLASS = MongoClient |  | ||||||
|     READ_PREF = ReadPreference.SECONDARY |  | ||||||
| else: |  | ||||||
|     from pymongo import ReplicaSetConnection |  | ||||||
|     CONN_CLASS = ReplicaSetConnection |  | ||||||
|     READ_PREF = ReadPreference.SECONDARY_ONLY |  | ||||||
|  |  | ||||||
| import mongoengine | import mongoengine | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.connection import MongoEngineConnectionError | from mongoengine.connection import MongoEngineConnectionError | ||||||
|  |  | ||||||
|  |  | ||||||
|  | CONN_CLASS = MongoClient | ||||||
|  | READ_PREF = ReadPreference.SECONDARY | ||||||
|  |  | ||||||
|  |  | ||||||
| class ConnectionTest(unittest.TestCase): | class ConnectionTest(unittest.TestCase): | ||||||
|  |  | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
| @@ -35,7 +28,7 @@ class ConnectionTest(unittest.TestCase): | |||||||
|         """ |         """ | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             conn = connect(db='mongoenginetest', |             conn = mongoengine.connect(db='mongoenginetest', | ||||||
|                            host="mongodb://localhost/mongoenginetest?replicaSet=rs", |                            host="mongodb://localhost/mongoenginetest?replicaSet=rs", | ||||||
|                            read_preference=READ_PREF) |                            read_preference=READ_PREF) | ||||||
|         except MongoEngineConnectionError as e: |         except MongoEngineConnectionError as e: | ||||||
| @@ -47,5 +40,6 @@ class ConnectionTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         self.assertEqual(conn.read_preference, READ_PREF) |         self.assertEqual(conn.read_preference, READ_PREF) | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -39,7 +39,6 @@ class SignalTests(unittest.TestCase): | |||||||
|             def post_init(cls, sender, document, **kwargs): |             def post_init(cls, sender, document, **kwargs): | ||||||
|                 signal_output.append('post_init signal, %s, document._created = %s' % (document, document._created)) |                 signal_output.append('post_init signal, %s, document._created = %s' % (document, document._created)) | ||||||
|  |  | ||||||
|  |  | ||||||
|             @classmethod |             @classmethod | ||||||
|             def pre_save(cls, sender, document, **kwargs): |             def pre_save(cls, sender, document, **kwargs): | ||||||
|                 signal_output.append('pre_save signal, %s' % document) |                 signal_output.append('pre_save signal, %s' % document) | ||||||
| @@ -228,6 +227,9 @@ class SignalTests(unittest.TestCase): | |||||||
|  |  | ||||||
|         self.ExplicitId.objects.delete() |         self.ExplicitId.objects.delete() | ||||||
|  |  | ||||||
|  |         # Note that there is a chance that the following assert fails in case | ||||||
|  |         # some receivers (eventually created in other tests) | ||||||
|  |         # gets garbage collected (https://pythonhosted.org/blinker/#blinker.base.Signal.connect) | ||||||
|         self.assertEqual(self.pre_signals, post_signals) |         self.assertEqual(self.pre_signals, post_signals) | ||||||
|  |  | ||||||
|     def test_model_signals(self): |     def test_model_signals(self): | ||||||
| @@ -247,7 +249,7 @@ class SignalTests(unittest.TestCase): | |||||||
|         def load_existing_author(): |         def load_existing_author(): | ||||||
|             a  = self.Author(name='Bill Shakespeare') |             a  = self.Author(name='Bill Shakespeare') | ||||||
|             a.save() |             a.save() | ||||||
|             self.get_signal_output(lambda: None) # eliminate signal output |             self.get_signal_output(lambda: None)  # eliminate signal output | ||||||
|             a1 = self.Author.objects(name='Bill Shakespeare')[0] |             a1 = self.Author.objects(name='Bill Shakespeare')[0] | ||||||
|  |  | ||||||
|         self.assertEqual(self.get_signal_output(create_author), [ |         self.assertEqual(self.get_signal_output(create_author), [ | ||||||
| @@ -431,5 +433,6 @@ class SignalTests(unittest.TestCase): | |||||||
|             {} |             {} | ||||||
|         ]) |         ]) | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
							
								
								
									
										38
									
								
								tests/test_utils.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										38
									
								
								tests/test_utils.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,38 @@ | |||||||
|  | import unittest | ||||||
|  | import re | ||||||
|  |  | ||||||
|  | from mongoengine.base.utils import LazyRegexCompiler | ||||||
|  |  | ||||||
|  | signal_output = [] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class LazyRegexCompilerTest(unittest.TestCase): | ||||||
|  |  | ||||||
|  |     def test_lazy_regex_compiler_verify_laziness_of_descriptor(self): | ||||||
|  |         class UserEmail(object): | ||||||
|  |             EMAIL_REGEX = LazyRegexCompiler('@', flags=32) | ||||||
|  |  | ||||||
|  |         descriptor = UserEmail.__dict__['EMAIL_REGEX'] | ||||||
|  |         self.assertIsNone(descriptor._compiled_regex) | ||||||
|  |  | ||||||
|  |         regex = UserEmail.EMAIL_REGEX | ||||||
|  |         self.assertEqual(regex, re.compile('@', flags=32)) | ||||||
|  |         self.assertEqual(regex.search('user@domain.com').group(), '@') | ||||||
|  |  | ||||||
|  |         user_email = UserEmail() | ||||||
|  |         self.assertIs(user_email.EMAIL_REGEX, UserEmail.EMAIL_REGEX) | ||||||
|  |  | ||||||
|  |     def test_lazy_regex_compiler_verify_cannot_set_descriptor_on_instance(self): | ||||||
|  |         class UserEmail(object): | ||||||
|  |             EMAIL_REGEX = LazyRegexCompiler('@') | ||||||
|  |  | ||||||
|  |         user_email = UserEmail() | ||||||
|  |         with self.assertRaises(AttributeError): | ||||||
|  |             user_email.EMAIL_REGEX = re.compile('@') | ||||||
|  |  | ||||||
|  |     def test_lazy_regex_compiler_verify_can_override_class_attr(self): | ||||||
|  |         class UserEmail(object): | ||||||
|  |             EMAIL_REGEX = LazyRegexCompiler('@') | ||||||
|  |  | ||||||
|  |         UserEmail.EMAIL_REGEX = re.compile('cookies') | ||||||
|  |         self.assertEqual(UserEmail.EMAIL_REGEX.search('Cake & cookies').group(), 'cookies') | ||||||
| @@ -1,22 +1,24 @@ | |||||||
|  | import operator | ||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| from nose.plugins.skip import SkipTest | from nose.plugins.skip import SkipTest | ||||||
|  |  | ||||||
| from mongoengine import connect | from mongoengine import connect | ||||||
| from mongoengine.connection import get_db, get_connection | from mongoengine.connection import get_db, disconnect_all | ||||||
| from mongoengine.python_support import IS_PYMONGO_3 | from mongoengine.mongodb_support import get_mongodb_version | ||||||
|  |  | ||||||
|  |  | ||||||
| MONGO_TEST_DB = 'mongoenginetest' | MONGO_TEST_DB = 'mongoenginetest'   # standard name for the test database | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoDBTestCase(unittest.TestCase): | class MongoDBTestCase(unittest.TestCase): | ||||||
|     """Base class for tests that need a mongodb connection |     """Base class for tests that need a mongodb connection | ||||||
|     db is being dropped automatically |     It ensures that the db is clean at the beginning and dropped at the end automatically | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def setUpClass(cls): |     def setUpClass(cls): | ||||||
|  |         disconnect_all() | ||||||
|         cls._connection = connect(db=MONGO_TEST_DB) |         cls._connection = connect(db=MONGO_TEST_DB) | ||||||
|         cls._connection.drop_database(MONGO_TEST_DB) |         cls._connection.drop_database(MONGO_TEST_DB) | ||||||
|         cls.db = get_db() |         cls.db = get_db() | ||||||
| @@ -24,55 +26,40 @@ class MongoDBTestCase(unittest.TestCase): | |||||||
|     @classmethod |     @classmethod | ||||||
|     def tearDownClass(cls): |     def tearDownClass(cls): | ||||||
|         cls._connection.drop_database(MONGO_TEST_DB) |         cls._connection.drop_database(MONGO_TEST_DB) | ||||||
|  |         disconnect_all() | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_mongodb_version(): | def get_as_pymongo(doc): | ||||||
|     """Return the version tuple of the MongoDB server that the default |     """Fetch the pymongo version of a certain Document""" | ||||||
|     connection is connected to. |     return doc.__class__.objects.as_pymongo().get(id=doc.id) | ||||||
|     """ |  | ||||||
|     return tuple(get_connection().server_info()['versionArray']) |  | ||||||
|  |  | ||||||
| def _decorated_with_ver_requirement(func, ver_tuple): |  | ||||||
|     """Return a given function decorated with the version requirement | def _decorated_with_ver_requirement(func, mongo_version_req, oper): | ||||||
|     for a particular MongoDB version tuple. |     """Return a MongoDB version requirement decorator. | ||||||
|  |  | ||||||
|  |     The resulting decorator will raise a SkipTest exception if the current | ||||||
|  |     MongoDB version doesn't match the provided version/operator. | ||||||
|  |  | ||||||
|  |     For example, if you define a decorator like so: | ||||||
|  |  | ||||||
|  |         def requires_mongodb_gte_36(func): | ||||||
|  |             return _decorated_with_ver_requirement( | ||||||
|  |                 func, (3.6), oper=operator.ge | ||||||
|  |             ) | ||||||
|  |  | ||||||
|  |     Then tests decorated with @requires_mongodb_gte_36 will be skipped if | ||||||
|  |     ran against MongoDB < v3.6. | ||||||
|  |  | ||||||
|  |     :param mongo_version_req: The mongodb version requirement (tuple(int, int)) | ||||||
|  |     :param oper: The operator to apply (e.g: operator.ge) | ||||||
|     """ |     """ | ||||||
|     def _inner(*args, **kwargs): |     def _inner(*args, **kwargs): | ||||||
|         mongodb_ver = get_mongodb_version() |         mongodb_v = get_mongodb_version() | ||||||
|         if mongodb_ver >= ver_tuple: |         if oper(mongodb_v, mongo_version_req): | ||||||
|             return func(*args, **kwargs) |             return func(*args, **kwargs) | ||||||
|  |  | ||||||
|         raise SkipTest('Needs MongoDB v{}+'.format( |         raise SkipTest('Needs MongoDB v{}+'.format('.'.join(str(n) for n in mongo_version_req))) | ||||||
|             '.'.join([str(v) for v in ver_tuple]) |  | ||||||
|         )) |  | ||||||
|  |  | ||||||
|     _inner.__name__ = func.__name__ |     _inner.__name__ = func.__name__ | ||||||
|     _inner.__doc__ = func.__doc__ |     _inner.__doc__ = func.__doc__ | ||||||
|  |  | ||||||
|     return _inner |     return _inner | ||||||
|  |  | ||||||
| def needs_mongodb_v26(func): |  | ||||||
|     """Raise a SkipTest exception if we're working with MongoDB version |  | ||||||
|     lower than v2.6. |  | ||||||
|     """ |  | ||||||
|     return _decorated_with_ver_requirement(func, (2, 6)) |  | ||||||
|  |  | ||||||
| def needs_mongodb_v3(func): |  | ||||||
|     """Raise a SkipTest exception if we're working with MongoDB version |  | ||||||
|     lower than v3.0. |  | ||||||
|     """ |  | ||||||
|     return _decorated_with_ver_requirement(func, (3, 0)) |  | ||||||
|  |  | ||||||
| def skip_pymongo3(f): |  | ||||||
|     """Raise a SkipTest exception if we're running a test against |  | ||||||
|     PyMongo v3.x. |  | ||||||
|     """ |  | ||||||
|     def _inner(*args, **kwargs): |  | ||||||
|         if IS_PYMONGO_3: |  | ||||||
|             raise SkipTest("Useless with PyMongo 3+") |  | ||||||
|         return f(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     _inner.__name__ = f.__name__ |  | ||||||
|     _inner.__doc__ = f.__doc__ |  | ||||||
|  |  | ||||||
|     return _inner |  | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										2
									
								
								tox.ini
									
									
									
									
									
								
							
							
						
						
									
										2
									
								
								tox.ini
									
									
									
									
									
								
							| @@ -6,7 +6,7 @@ commands = | |||||||
|     python setup.py nosetests {posargs} |     python setup.py nosetests {posargs} | ||||||
| deps = | deps = | ||||||
|     nose |     nose | ||||||
|     mg35: PyMongo==3.5 |     mg34x: PyMongo>=3.4,<3.5 | ||||||
|     mg3x: PyMongo>=3.0,<3.7 |     mg3x: PyMongo>=3.0,<3.7 | ||||||
| setenv = | setenv = | ||||||
|     PYTHON_EGG_CACHE = {envdir}/python-eggs |     PYTHON_EGG_CACHE = {envdir}/python-eggs | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user