Compare commits

5 Commits

v0.19.1 ... simpler-as
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | e124c95621 | ||
|  | 5f1670ffa2 | ||
|  | b09698e926 | ||
|  | d35d969b4e | ||
|  | e751ab55c8 | 
							
								
								
									
.install_mongodb_on_travis.sh — 23 lines (Normal file)
							| @@ -0,0 +1,23 @@ | ||||
| #!/bin/bash | ||||
|  | ||||
| sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10 | ||||
|  | ||||
| if [ "$MONGODB" = "2.4" ]; then | ||||
|     echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list | ||||
|     sudo apt-get update | ||||
|     sudo apt-get install mongodb-10gen=2.4.14 | ||||
|     sudo service mongodb start | ||||
| elif [ "$MONGODB" = "2.6" ]; then | ||||
|     echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list | ||||
|     sudo apt-get update | ||||
|     sudo apt-get install mongodb-org-server=2.6.12 | ||||
|     # service should be started automatically | ||||
| elif [ "$MONGODB" = "3.0" ]; then | ||||
|     echo "deb http://repo.mongodb.org/apt/ubuntu precise/mongodb-org/3.0 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb.list | ||||
|     sudo apt-get update | ||||
|     sudo apt-get install mongodb-org-server=3.0.14 | ||||
|     # service should be started automatically | ||||
| else | ||||
|     echo "Invalid MongoDB version, expected 2.4, 2.6, or 3.0." | ||||
|     exit 1 | ||||
| fi; | ||||
							
								
								
									
.travis.yml — 104 changed lines
							| @@ -2,87 +2,73 @@ | ||||
| # PyMongo combinations. However, that would result in an overly long build | ||||
| # with a very large number of jobs, hence we only test a subset of all the | ||||
| # combinations: | ||||
| # * MongoDB v3.4 & the latest PyMongo v3.x is currently the "main" setup, | ||||
| #   tested against Python v2.7, v3.5, v3.6, v3.7, v3.8 and PyPy. | ||||
| # * Besides that, we test the lowest actively supported Python/MongoDB/PyMongo | ||||
| #   combination: MongoDB v3.4, PyMongo v3.4, Python v2.7. | ||||
| # * MongoDB v3.6 is tested against Python v3.6, and PyMongo v3.6, v3.7, v3.8. | ||||
| # | ||||
| # We should periodically check MongoDB Server versions supported by MongoDB | ||||
| # Inc., add newly released versions to the test matrix, and remove versions | ||||
| # which have reached their End of Life. See: | ||||
| # 1. https://www.mongodb.com/support-policy. | ||||
| # 2. https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#python-driver-compatibility | ||||
| # * MongoDB v2.4 & v3.0 are only tested against Python v2.7 & v3.5. | ||||
| # * MongoDB v2.4 is tested against PyMongo v2.7 & v3.x. | ||||
| # * MongoDB v3.0 is tested against PyMongo v3.x. | ||||
| # * MongoDB v2.6 is currently the "main" version tested against Python v2.7, | ||||
| #   v3.5, PyPy & PyPy3, and PyMongo v2.7, v2.8 & v3.x. | ||||
| # | ||||
| # Reminder: Update README.rst if you change MongoDB versions we test. | ||||
|  | ||||
|  | ||||
| language: python | ||||
|  | ||||
| python: | ||||
| - 2.7 | ||||
| - 3.5 | ||||
| - 3.6 | ||||
| - 3.7 | ||||
| - 3.8 | ||||
| - pypy | ||||
| - pypy3 | ||||
|  | ||||
| dist: xenial | ||||
|  | ||||
| env: | ||||
|   global: | ||||
|     - MONGODB_3_4=3.4.17 | ||||
|     - MONGODB_3_6=3.6.12 | ||||
|     - PYMONGO_3_9=3.9 | ||||
|     - PYMONGO_3_6=3.6 | ||||
|     - PYMONGO_3_4=3.4 | ||||
|   matrix: | ||||
|     - MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_9} | ||||
| - MONGODB=2.6 PYMONGO=2.7 | ||||
| - MONGODB=2.6 PYMONGO=2.8 | ||||
| - MONGODB=2.6 PYMONGO=3.0 | ||||
|  | ||||
| matrix: | ||||
|  | ||||
|   # Finish the build as soon as one job fails | ||||
|   fast_finish: true | ||||
|  | ||||
|   include: | ||||
|   - python: 2.7 | ||||
|     env: MONGODB=${MONGODB_3_4} PYMONGO=${PYMONGO_3_4} | ||||
|   - python: 3.7 | ||||
|     env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_6} | ||||
|   - python: 3.7 | ||||
|     env: MONGODB=${MONGODB_3_6} PYMONGO=${PYMONGO_3_9} | ||||
|     env: MONGODB=2.4 PYMONGO=2.7 | ||||
|   - python: 2.7 | ||||
|     env: MONGODB=2.4 PYMONGO=3.0 | ||||
|   - python: 2.7 | ||||
|     env: MONGODB=3.0 PYMONGO=3.0 | ||||
|   - python: 3.5 | ||||
|     env: MONGODB=2.4 PYMONGO=2.7 | ||||
|   - python: 3.5 | ||||
|     env: MONGODB=2.4 PYMONGO=3.0 | ||||
|   - python: 3.5 | ||||
|     env: MONGODB=3.0 PYMONGO=3.0 | ||||
|  | ||||
| before_install: | ||||
| - bash .install_mongodb_on_travis.sh | ||||
|  | ||||
| install: | ||||
|   # Install Mongo | ||||
|   - wget http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-${MONGODB}.tgz | ||||
|   - tar xzf mongodb-linux-x86_64-${MONGODB}.tgz | ||||
|   - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --version | ||||
|   # Install Python dependencies. | ||||
|   - pip install --upgrade pip | ||||
|   - pip install coveralls | ||||
|   - pip install flake8 flake8-import-order | ||||
|   - pip install tox         # tox 3.11.0 has requirement virtualenv>=14.0.0 | ||||
|   - pip install virtualenv  # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) | ||||
|   # tox dryrun to setup the tox venv (we run a mock test). | ||||
|   - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "-k=test_ci_placeholder" | ||||
|   # Install black for Python v3.7 only. | ||||
|   - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then pip install black; fi | ||||
| - sudo apt-get install python-dev python3-dev libopenjpeg-dev zlib1g-dev libjpeg-turbo8-dev | ||||
|   libtiff4-dev libjpeg8-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.5-dev tk8.5-dev | ||||
|   python-tk | ||||
| - travis_retry pip install --upgrade pip | ||||
| - travis_retry pip install coveralls | ||||
| - travis_retry pip install flake8 flake8-import-order | ||||
| - travis_retry pip install tox>=1.9 | ||||
| - travis_retry pip install "virtualenv<14.0.0"  # virtualenv>=14.0.0 has dropped Python 3.2 support (and pypy3 is based on py32) | ||||
| - travis_retry tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -e test | ||||
|  | ||||
| # Cache dependencies installed via pip | ||||
| cache: pip | ||||
|  | ||||
| # Run flake8 for py27 | ||||
| before_script: | ||||
|   - mkdir ${PWD}/mongodb-linux-x86_64-${MONGODB}/data | ||||
|   - ${PWD}/mongodb-linux-x86_64-${MONGODB}/bin/mongod --dbpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/data --logpath ${PWD}/mongodb-linux-x86_64-${MONGODB}/mongodb.log --fork | ||||
|   - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi   # Run flake8 for Python 2.7 only | ||||
|   - if [[ $TRAVIS_PYTHON_VERSION == '3.7' ]]; then black --check .; else echo "black only runs on py37"; fi   # Run black for Python 3.7 only | ||||
|   - mongo --eval 'db.version();'    # Make sure mongo is awake | ||||
| - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then flake8 .; else echo "flake8 only runs on py27"; fi | ||||
|  | ||||
| script: | ||||
|   - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- -a "--cov=mongoengine" | ||||
| - tox -e $(echo py$TRAVIS_PYTHON_VERSION-mg$PYMONGO | tr -d . | sed -e 's/pypypy/pypy/') -- --with-coverage | ||||
|  | ||||
| # For now only submit coveralls for Python v2.7. Python v3.x currently shows | ||||
| # 0% coverage. That's caused by 'use_2to3', which builds the py3-compatible | ||||
| # code in a separate dir and runs tests on that. | ||||
| after_success: | ||||
| - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; else echo "coveralls only sent for py27"; fi | ||||
| - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then coveralls --verbose; fi | ||||
|  | ||||
| notifications: | ||||
|   irc: irc.freenode.org#mongoengine | ||||
| @@ -100,15 +86,15 @@ deploy: | ||||
|   password: | ||||
|     secure: QMyatmWBnC6ZN3XLW2+fTBDU4LQcp1m/LjR2/0uamyeUzWKdlOoh/Wx5elOgLwt/8N9ppdPeG83ose1jOz69l5G0MUMjv8n/RIcMFSpCT59tGYqn3kh55b0cIZXFT9ar+5cxlif6a5rS72IHm5li7QQyxexJIII6Uxp0kpvUmek= | ||||
|  | ||||
|   # Create a source distribution and a pure python wheel for faster installs. | ||||
|   # create a source distribution and a pure python wheel for faster installs | ||||
|   distributions: "sdist bdist_wheel" | ||||
|  | ||||
|   # Only deploy on tagged commits (aka GitHub releases) and only for the parent | ||||
|   # repo's builds running Python v2.7 along with PyMongo v3.x and MongoDB v3.4. | ||||
|   # We run Travis against many different Python, PyMongo, and MongoDB versions | ||||
|   # and we don't want the deploy to occur multiple times). | ||||
|   # only deploy on tagged commits (aka GitHub releases) and only for the | ||||
|   # parent repo's builds running Python 2.7 along with dev PyMongo (we run | ||||
|   # Travis against many different Python and PyMongo versions and we don't | ||||
|   # want the deploy to occur multiple times). | ||||
|   on: | ||||
|     tags: true | ||||
|     repo: MongoEngine/mongoengine | ||||
|     condition: ($PYMONGO = ${PYMONGO_3_9}) && ($MONGODB = ${MONGODB_3_4}) | ||||
|     condition: "$PYMONGO = 3.0" | ||||
|     python: 2.7 | ||||
|   | ||||
							
								
								
									
AUTHORS — 12 changed lines
							| @@ -243,15 +243,3 @@ that much better: | ||||
|  * Victor Varvaryuk | ||||
|  * Stanislav Kaledin (https://github.com/sallyruthstruik) | ||||
|  * Dmitry Yantsen (https://github.com/mrTable) | ||||
|  * Renjianxin (https://github.com/Davidrjx) | ||||
|  * Erdenezul Batmunkh (https://github.com/erdenezul) | ||||
|  * Andy Yankovsky (https://github.com/werat) | ||||
|  * Bastien Gérard (https://github.com/bagerard) | ||||
|  * Trevor Hall (https://github.com/tjhall13) | ||||
|  * Gleb Voropaev (https://github.com/buggyspace) | ||||
|  * Paulo Amaral (https://github.com/pauloAmaral) | ||||
|  * Gaurav Dadhania (https://github.com/GVRV) | ||||
|  * Yurii Andrieiev (https://github.com/yandrieiev) | ||||
|  * Filip Kucharczyk (https://github.com/Pacu2) | ||||
|  * Eric Timmons (https://github.com/daewok) | ||||
|  * Matthew Simpson (https://github.com/mcsimps2) | ||||
|   | ||||
| @@ -22,17 +22,18 @@ Supported Interpreters | ||||
|  | ||||
| MongoEngine supports CPython 2.7 and newer. Language | ||||
| features not supported by all interpreters can not be used. | ||||
| The codebase is written in python 2 so you must be using python 2 | ||||
| when developing new features. Compatibility of the library with Python 3 | ||||
| relies on the 2to3 package that gets executed as part of the installation | ||||
| build. You should ensure that your code is properly converted by | ||||
| `2to3 <http://docs.python.org/library/2to3.html>`_. | ||||
| Please also ensure that your code is properly converted by | ||||
| `2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support. | ||||
|  | ||||
| Style Guide | ||||
| ----------- | ||||
|  | ||||
| MongoEngine uses `black <https://github.com/python/black>`_ for code | ||||
| formatting. | ||||
| MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_ | ||||
| including 4 space indents. When possible we try to stick to 79 character line | ||||
| limits. However, screens got bigger and an ORM has a strong focus on | ||||
| readability and if it can help, we accept 119 as maximum line length, in a | ||||
| similar way as `django does | ||||
| <https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#python-style>`_ | ||||
|  | ||||
| Testing | ||||
| ------- | ||||
| @@ -54,7 +55,7 @@ General Guidelines | ||||
|   should adapt to the breaking change in docs/upgrade.rst. | ||||
| - Write inline documentation for new classes and methods. | ||||
| - Write tests and make sure they pass (make sure you have a mongod | ||||
|   running on the default port, then execute ``python setup.py test`` | ||||
|   running on the default port, then execute ``python setup.py nosetests`` | ||||
|   from the cmd line to run the test suite). | ||||
| - Ensure tests pass on all supported Python, PyMongo, and MongoDB versions. | ||||
|   You can test various Python and PyMongo versions locally by executing | ||||
|   | ||||
							
								
								
									
README.rst — 36 changed lines
							| @@ -26,28 +26,26 @@ an `API reference <https://mongoengine-odm.readthedocs.io/apireference.html>`_. | ||||
|  | ||||
| Supported MongoDB Versions | ||||
| ========================== | ||||
| MongoEngine is currently tested against MongoDB v3.4 and v3.6. Future versions | ||||
| should be supported as well, but aren't actively tested at the moment. Make | ||||
| sure to open an issue or submit a pull request if you experience any problems | ||||
| with MongoDB version > 3.6. | ||||
| MongoEngine is currently tested against MongoDB v2.4, v2.6, and v3.0. Future | ||||
| versions should be supported as well, but aren't actively tested at the moment. | ||||
| Make sure to open an issue or submit a pull request if you experience any | ||||
| problems with MongoDB v3.2+. | ||||
|  | ||||
| Installation | ||||
| ============ | ||||
| We recommend the use of `virtualenv <https://virtualenv.pypa.io/>`_ and of | ||||
| `pip <https://pip.pypa.io/>`_. You can then use ``pip install -U mongoengine``. | ||||
| You may also have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | ||||
| and thus you can use ``easy_install -U mongoengine``. Another option is | ||||
| `pipenv <https://docs.pipenv.org/>`_. You can then use ``pipenv install mongoengine`` | ||||
| to both create the virtual environment and install the package. Otherwise, you can | ||||
| download the source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and | ||||
| run ``python setup.py install``. | ||||
| and thus you can use ``easy_install -U mongoengine``. Otherwise, you can download the | ||||
| source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python | ||||
| setup.py install``. | ||||
|  | ||||
| Dependencies | ||||
| ============ | ||||
| All of the dependencies can easily be installed via `pip <https://pip.pypa.io/>`_. | ||||
| At the very least, you'll need these two packages to use MongoEngine: | ||||
|  | ||||
| - pymongo>=3.4 | ||||
| - pymongo>=2.7.1 | ||||
| - six>=1.10.0 | ||||
|  | ||||
| If you utilize a ``DateTimeField``, you might also use a more flexible date parser: | ||||
| @@ -91,11 +89,12 @@ Some simple examples of what MongoEngine code looks like: | ||||
|  | ||||
|     # Iterate over all posts using the BlogPost superclass | ||||
|     >>> for post in BlogPost.objects: | ||||
|     ...     print('===', post.title, '===') | ||||
|     ...     print '===', post.title, '===' | ||||
|     ...     if isinstance(post, TextPost): | ||||
|     ...         print(post.content) | ||||
|     ...         print post.content | ||||
|     ...     elif isinstance(post, LinkPost): | ||||
|     ...         print('Link:', post.url) | ||||
|     ...         print 'Link:', post.url | ||||
|     ...     print | ||||
|     ... | ||||
|  | ||||
|     # Count all blog posts and its subtypes | ||||
| @@ -115,8 +114,7 @@ Some simple examples of what MongoEngine code looks like: | ||||
| Tests | ||||
| ===== | ||||
| To run the test suite, ensure you are running a local instance of MongoDB on | ||||
| the standard port and have ``pytest`` installed. Then, run ``python setup.py test`` | ||||
| or simply ``pytest``. | ||||
| the standard port and have ``nose`` installed. Then, run ``python setup.py nosetests``. | ||||
|  | ||||
| To run the test suite on every supported Python and PyMongo version, you can | ||||
| use ``tox``. You'll need to make sure you have each supported Python version | ||||
| @@ -129,14 +127,16 @@ installed in your environment and then: | ||||
|     # Run the test suites | ||||
|     $ tox | ||||
|  | ||||
| If you wish to run a subset of tests, use the pytest convention: | ||||
| If you wish to run a subset of tests, use the nosetests convention: | ||||
|  | ||||
| .. code-block:: shell | ||||
|  | ||||
|     # Run all the tests in a particular test file | ||||
|     $ pytest tests/fields/test_fields.py | ||||
|     $ python setup.py nosetests --tests tests/fields/fields.py | ||||
|     # Run only particular test class in that file | ||||
|     $ pytest tests/fields/test_fields.py::TestField | ||||
|     $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest | ||||
|     # Use the -s option if you want to print some debug statements or use pdb | ||||
|     $ python setup.py nosetests --tests tests/fields/fields.py:FieldTest -s | ||||
|  | ||||
| Community | ||||
| ========= | ||||
|   | ||||
							
								
								
									
benchmark.py — 207 lines (Normal file)
							| @@ -0,0 +1,207 @@ | ||||
| #!/usr/bin/env python | ||||
|  | ||||
| """ | ||||
| Simple benchmark comparing PyMongo and MongoEngine. | ||||
|  | ||||
| Sample run on a mid 2015 MacBook Pro (commit b282511): | ||||
|  | ||||
| Benchmarking... | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries - Pymongo | ||||
| 2.58979988098 | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries - Pymongo write_concern={"w": 0} | ||||
| 1.26657605171 | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries - MongoEngine | ||||
| 8.4351580143 | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries without continual assign - MongoEngine | ||||
| 7.20191693306 | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True | ||||
| 6.31104588509 | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True | ||||
| 6.07083487511 | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False | ||||
| 5.97704291344 | ||||
| ---------------------------------------------------------------------------------------------------- | ||||
| Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False | ||||
| 5.9111430645 | ||||
| """ | ||||
|  | ||||
| import timeit | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|     print("Benchmarking...") | ||||
|  | ||||
|     setup = """ | ||||
| from pymongo import MongoClient | ||||
| connection = MongoClient() | ||||
| connection.drop_database('timeit_test') | ||||
| """ | ||||
|  | ||||
|     stmt = """ | ||||
| from pymongo import MongoClient | ||||
| connection = MongoClient() | ||||
|  | ||||
| db = connection.timeit_test | ||||
| noddy = db.noddy | ||||
|  | ||||
| for i in range(10000): | ||||
|     example = {'fields': {}} | ||||
|     for j in range(20): | ||||
|         example['fields']['key' + str(j)] = 'value ' + str(j) | ||||
|  | ||||
|     noddy.save(example) | ||||
|  | ||||
| myNoddys = noddy.find() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries - Pymongo""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|     stmt = """ | ||||
| from pymongo import MongoClient | ||||
| from pymongo.write_concern import WriteConcern | ||||
| connection = MongoClient() | ||||
|  | ||||
| db = connection.get_database('timeit_test', write_concern=WriteConcern(w=0)) | ||||
| noddy = db.noddy | ||||
|  | ||||
| for i in range(10000): | ||||
|     example = {'fields': {}} | ||||
|     for j in range(20): | ||||
|         example['fields']["key"+str(j)] = "value "+str(j) | ||||
|  | ||||
|     noddy.save(example) | ||||
|  | ||||
| myNoddys = noddy.find() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries - Pymongo write_concern={"w": 0}""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|     setup = """ | ||||
| from pymongo import MongoClient | ||||
| connection = MongoClient() | ||||
| connection.drop_database('timeit_test') | ||||
| connection.close() | ||||
|  | ||||
| from mongoengine import Document, DictField, connect | ||||
| connect('timeit_test') | ||||
|  | ||||
| class Noddy(Document): | ||||
|     fields = DictField() | ||||
| """ | ||||
|  | ||||
|     stmt = """ | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save() | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries - MongoEngine""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     fields = {} | ||||
|     for j in range(20): | ||||
|         fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.fields = fields | ||||
|     noddy.save() | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries without continual assign - MongoEngine""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save(write_concern={"w": 0}, cascade=True) | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries - MongoEngine - write_concern={"w": 0}, cascade = True""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save(write_concern={"w": 0}, validate=False, cascade=True) | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False, cascade=True""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save(validate=False, write_concern={"w": 0}) | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries - MongoEngine, write_concern={"w": 0}, validate=False""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save(force_insert=True, write_concern={"w": 0}, validate=False) | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print("""Creating 10000 dictionaries - MongoEngine, force_insert=True, write_concern={"w": 0}, validate=False""") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print(t.timeit(1)) | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
| @@ -1,142 +0,0 @@ | ||||
| from timeit import repeat | ||||
|  | ||||
| import mongoengine | ||||
| from mongoengine import ( | ||||
|     BooleanField, | ||||
|     Document, | ||||
|     EmailField, | ||||
|     EmbeddedDocument, | ||||
|     EmbeddedDocumentField, | ||||
|     IntField, | ||||
|     ListField, | ||||
|     StringField, | ||||
| ) | ||||
|  | ||||
| mongoengine.connect(db="mongoengine_benchmark_test") | ||||
|  | ||||
|  | ||||
| def timeit(f, n=10000): | ||||
|     return min(repeat(f, repeat=3, number=n)) / float(n) | ||||
|  | ||||
|  | ||||
| def test_basic(): | ||||
|     class Book(Document): | ||||
|         name = StringField() | ||||
|         pages = IntField() | ||||
|         tags = ListField(StringField()) | ||||
|         is_published = BooleanField() | ||||
|         author_email = EmailField() | ||||
|  | ||||
|     Book.drop_collection() | ||||
|  | ||||
|     def init_book(): | ||||
|         return Book( | ||||
|             name="Always be closing", | ||||
|             pages=100, | ||||
|             tags=["self-help", "sales"], | ||||
|             is_published=True, | ||||
|             author_email="alec@example.com", | ||||
|         ) | ||||
|  | ||||
|     print("Doc initialization: %.3fus" % (timeit(init_book, 1000) * 10 ** 6)) | ||||
|  | ||||
|     b = init_book() | ||||
|     print("Doc getattr: %.3fus" % (timeit(lambda: b.name, 10000) * 10 ** 6)) | ||||
|  | ||||
|     print( | ||||
|         "Doc setattr: %.3fus" | ||||
|         % (timeit(lambda: setattr(b, "name", "New name"), 10000) * 10 ** 6) | ||||
|     ) | ||||
|  | ||||
|     print("Doc to mongo: %.3fus" % (timeit(b.to_mongo, 1000) * 10 ** 6)) | ||||
|  | ||||
|     print("Doc validation: %.3fus" % (timeit(b.validate, 1000) * 10 ** 6)) | ||||
|  | ||||
|     def save_book(): | ||||
|         b._mark_as_changed("name") | ||||
|         b._mark_as_changed("tags") | ||||
|         b.save() | ||||
|  | ||||
|     print("Save to database: %.3fus" % (timeit(save_book, 100) * 10 ** 6)) | ||||
|  | ||||
|     son = b.to_mongo() | ||||
|     print( | ||||
|         "Load from SON: %.3fus" % (timeit(lambda: Book._from_son(son), 1000) * 10 ** 6) | ||||
|     ) | ||||
|  | ||||
|     print( | ||||
|         "Load from database: %.3fus" % (timeit(lambda: Book.objects[0], 100) * 10 ** 6) | ||||
|     ) | ||||
|  | ||||
|     def create_and_delete_book(): | ||||
|         b = init_book() | ||||
|         b.save() | ||||
|         b.delete() | ||||
|  | ||||
|     print( | ||||
|         "Init + save to database + delete: %.3fms" | ||||
|         % (timeit(create_and_delete_book, 10) * 10 ** 3) | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def test_big_doc(): | ||||
|     class Contact(EmbeddedDocument): | ||||
|         name = StringField() | ||||
|         title = StringField() | ||||
|         address = StringField() | ||||
|  | ||||
|     class Company(Document): | ||||
|         name = StringField() | ||||
|         contacts = ListField(EmbeddedDocumentField(Contact)) | ||||
|  | ||||
|     Company.drop_collection() | ||||
|  | ||||
|     def init_company(): | ||||
|         return Company( | ||||
|             name="MongoDB, Inc.", | ||||
|             contacts=[ | ||||
|                 Contact(name="Contact %d" % x, title="CEO", address="Address %d" % x) | ||||
|                 for x in range(1000) | ||||
|             ], | ||||
|         ) | ||||
|  | ||||
|     company = init_company() | ||||
|     print("Big doc to mongo: %.3fms" % (timeit(company.to_mongo, 100) * 10 ** 3)) | ||||
|  | ||||
|     print("Big doc validation: %.3fms" % (timeit(company.validate, 1000) * 10 ** 3)) | ||||
|  | ||||
|     company.save() | ||||
|  | ||||
|     def save_company(): | ||||
|         company._mark_as_changed("name") | ||||
|         company._mark_as_changed("contacts") | ||||
|         company.save() | ||||
|  | ||||
|     print("Save to database: %.3fms" % (timeit(save_company, 100) * 10 ** 3)) | ||||
|  | ||||
|     son = company.to_mongo() | ||||
|     print( | ||||
|         "Load from SON: %.3fms" | ||||
|         % (timeit(lambda: Company._from_son(son), 100) * 10 ** 3) | ||||
|     ) | ||||
|  | ||||
|     print( | ||||
|         "Load from database: %.3fms" | ||||
|         % (timeit(lambda: Company.objects[0], 100) * 10 ** 3) | ||||
|     ) | ||||
|  | ||||
|     def create_and_delete_company(): | ||||
|         c = init_company() | ||||
|         c.save() | ||||
|         c.delete() | ||||
|  | ||||
|     print( | ||||
|         "Init + save to database + delete: %.3fms" | ||||
|         % (timeit(create_and_delete_company, 10) * 10 ** 3) | ||||
|     ) | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     test_basic() | ||||
|     print("-" * 100) | ||||
|     test_big_doc() | ||||
| @@ -1,158 +0,0 @@ | ||||
| import timeit | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|     setup = """ | ||||
| from pymongo import MongoClient | ||||
| connection = MongoClient() | ||||
| connection.drop_database('mongoengine_benchmark_test') | ||||
| """ | ||||
|  | ||||
|     stmt = """ | ||||
| from pymongo import MongoClient | ||||
| connection = MongoClient() | ||||
|  | ||||
| db = connection.mongoengine_benchmark_test | ||||
| noddy = db.noddy | ||||
|  | ||||
| for i in range(10000): | ||||
|     example = {'fields': {}} | ||||
|     for j in range(20): | ||||
|         example['fields']["key"+str(j)] = "value "+str(j) | ||||
|  | ||||
|     noddy.insert_one(example) | ||||
|  | ||||
| myNoddys = noddy.find() | ||||
| [n for n in myNoddys]  # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print("PyMongo: Creating 10000 dictionaries.") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print("{}s".format(t.timeit(1))) | ||||
|  | ||||
|     stmt = """ | ||||
| from pymongo import MongoClient, WriteConcern | ||||
| connection = MongoClient() | ||||
|  | ||||
| db = connection.mongoengine_benchmark_test | ||||
| noddy = db.noddy.with_options(write_concern=WriteConcern(w=0)) | ||||
|  | ||||
| for i in range(10000): | ||||
|     example = {'fields': {}} | ||||
|     for j in range(20): | ||||
|         example['fields']["key"+str(j)] = "value "+str(j) | ||||
|  | ||||
|     noddy.insert_one(example) | ||||
|  | ||||
| myNoddys = noddy.find() | ||||
| [n for n in myNoddys]  # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print('PyMongo: Creating 10000 dictionaries (write_concern={"w": 0}).') | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print("{}s".format(t.timeit(1))) | ||||
|  | ||||
|     setup = """ | ||||
| from pymongo import MongoClient | ||||
| connection = MongoClient() | ||||
| connection.drop_database('mongoengine_benchmark_test') | ||||
| connection.close() | ||||
|  | ||||
| from mongoengine import Document, DictField, connect | ||||
| connect("mongoengine_benchmark_test") | ||||
|  | ||||
| class Noddy(Document): | ||||
|     fields = DictField() | ||||
| """ | ||||
|  | ||||
|     stmt = """ | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save() | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys]  # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print("MongoEngine: Creating 10000 dictionaries.") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print("{}s".format(t.timeit(1))) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     fields = {} | ||||
|     for j in range(20): | ||||
|         fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.fields = fields | ||||
|     noddy.save() | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys]  # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print("MongoEngine: Creating 10000 dictionaries (using a single field assignment).") | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print("{}s".format(t.timeit(1))) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save(write_concern={"w": 0}) | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print('MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}).') | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print("{}s".format(t.timeit(1))) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save(write_concern={"w": 0}, validate=False) | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print( | ||||
|         'MongoEngine: Creating 10000 dictionaries (write_concern={"w": 0}, validate=False).' | ||||
|     ) | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print("{}s".format(t.timeit(1))) | ||||
|  | ||||
|     stmt = """ | ||||
| for i in range(10000): | ||||
|     noddy = Noddy() | ||||
|     for j in range(20): | ||||
|         noddy.fields["key"+str(j)] = "value "+str(j) | ||||
|     noddy.save(force_insert=True, write_concern={"w": 0}, validate=False) | ||||
|  | ||||
| myNoddys = Noddy.objects() | ||||
| [n for n in myNoddys] # iterate | ||||
| """ | ||||
|  | ||||
|     print("-" * 100) | ||||
|     print( | ||||
|         'MongoEngine: Creating 10000 dictionaries (force_insert=True, write_concern={"w": 0}, validate=False).' | ||||
|     ) | ||||
|     t = timeit.Timer(stmt=stmt, setup=setup) | ||||
|     print("{}s".format(t.timeit(1))) | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
| @@ -13,7 +13,6 @@ Documents | ||||
|  | ||||
| .. autoclass:: mongoengine.Document | ||||
|    :members: | ||||
|    :inherited-members: | ||||
|  | ||||
|    .. attribute:: objects | ||||
|  | ||||
| @@ -22,18 +21,15 @@ Documents | ||||
|  | ||||
| .. autoclass:: mongoengine.EmbeddedDocument | ||||
|    :members: | ||||
|    :inherited-members: | ||||
|  | ||||
| .. autoclass:: mongoengine.DynamicDocument | ||||
|    :members: | ||||
|    :inherited-members: | ||||
|  | ||||
| .. autoclass:: mongoengine.DynamicEmbeddedDocument | ||||
|    :members: | ||||
|    :inherited-members: | ||||
|  | ||||
| .. autoclass:: mongoengine.document.MapReduceDocument | ||||
|    :members: | ||||
|   :members: | ||||
|  | ||||
| .. autoclass:: mongoengine.ValidationError | ||||
|   :members: | ||||
| @@ -91,9 +87,7 @@ Fields | ||||
| .. autoclass:: mongoengine.fields.DictField | ||||
| .. autoclass:: mongoengine.fields.MapField | ||||
| .. autoclass:: mongoengine.fields.ReferenceField | ||||
| .. autoclass:: mongoengine.fields.LazyReferenceField | ||||
| .. autoclass:: mongoengine.fields.GenericReferenceField | ||||
| .. autoclass:: mongoengine.fields.GenericLazyReferenceField | ||||
| .. autoclass:: mongoengine.fields.CachedReferenceField | ||||
| .. autoclass:: mongoengine.fields.BinaryField | ||||
| .. autoclass:: mongoengine.fields.FileField | ||||
|   | ||||
| @@ -1,4 +1,3 @@ | ||||
|  | ||||
| ========= | ||||
| Changelog | ||||
| ========= | ||||
| @@ -7,237 +6,80 @@ Development | ||||
| =========== | ||||
| - (Fill this out as you fix issues and develop your features). | ||||
|  | ||||
| Changes in 0.19.1 | ||||
| ================= | ||||
| - Requires Pillow < 7.0.0 as it dropped Python2 support | ||||
| - DEPRECATION: The interface of ``QuerySet.aggregate`` method was changed, it no longer takes an unpacked list of | ||||
|     pipeline steps (*pipeline) but simply takes the pipeline list just like ``pymongo.Collection.aggregate`` does. #2079 | ||||
|  | ||||
| Changes in 0.19.0 | ||||
| ================= | ||||
| - BREAKING CHANGE: ``class_check`` and ``read_preference`` keyword arguments are no longer available when filtering a ``QuerySet``. #2112 | ||||
|     - Instead of ``Doc.objects(foo=bar, read_preference=...)`` use ``Doc.objects(foo=bar).read_preference(...)``. | ||||
|     - Instead of ``Doc.objects(foo=bar, class_check=False)`` use ``Doc.objects(foo=bar).clear_cls_query(...)``. | ||||
|     - This change also renames the private ``QuerySet._initial_query`` attribute to ``_cls_query``. | ||||
| - BREAKING CHANGE: Removed the deprecated ``format`` param from ``QuerySet.explain``. #2113 | ||||
| - BREAKING CHANGE: Renamed ``MongoEngineConnectionError`` to ``ConnectionFailure``. #2111 | ||||
|     - If you catch/use ``MongoEngineConnectionError`` in your code, you'll have to rename it. | ||||
| - BREAKING CHANGE: Positional arguments when instantiating a document are no longer supported. #2103 | ||||
|     - From now on keyword arguments (e.g. ``Doc(field_name=value)``) are required. | ||||
| - BREAKING CHANGE: A ``LazyReferenceField`` is now stored in the ``_data`` field of its parent as a ``DBRef``, ``Document``, or ``EmbeddedDocument`` (``ObjectId`` is no longer allowed). #2182 | ||||
| - DEPRECATION: ``Q.empty`` & ``QNode.empty`` are marked as deprecated and will be removed in a next version of MongoEngine. #2210 | ||||
|     - Added ability to check if Q or QNode are empty by parsing them to bool. | ||||
|     - Instead of ``Q(name="John").empty`` use ``not Q(name="John")``. | ||||
| - Fix updating/modifying/deleting/reloading a document that's sharded by a field with ``db_field`` specified. #2125 | ||||
| - Only set no_cursor_timeout when requested (fixes an incompatibility with MongoDB 4.2) #2148 | ||||
| - ``ListField`` now accepts an optional ``max_length`` parameter. #2110 | ||||
| - Improve error message related to InvalidDocumentError #2180 | ||||
| - Added BulkWriteError to replace NotUniqueError which was misleading in bulk write insert #2152 | ||||
| - Added ability to compare Q and Q operations #2204 | ||||
| - Added ability to use a db alias on query_counter #2194 | ||||
| - Added ability to specify collations for querysets with ``Doc.objects.collation`` #2024 | ||||
| - Fix updates of a list field by negative index #2094 | ||||
| - Switch from nosetest to pytest as test runner #2114 | ||||
| - The codebase is now formatted using ``black``. #2109 | ||||
| - Documentation improvements: | ||||
|     - Documented how `pymongo.monitoring` can be used to log all queries issued by MongoEngine to the driver. | ||||
|  | ||||
| Changes in 0.18.2 | ||||
| ================= | ||||
| - Replace deprecated PyMongo v2.x methods with their v3.x equivalents in the ``SequenceField``. #2097 | ||||
| - Various code clarity and documentation improvements. | ||||
|  | ||||
| Changes in 0.18.1 | ||||
| ================= | ||||
| - Fix a bug introduced in 0.18.0 which was causing ``Document.save`` to update all the fields instead of updating only the modified fields. This bug only occurred when using a custom PK. #2082 | ||||
| - Add Python 3.7 to Travis CI. #2058 | ||||
|  | ||||
| Changes in 0.18.0 | ||||
| ================= | ||||
| - Drop support for EOL'd MongoDB v2.6, v3.0, and v3.2. | ||||
| - MongoEngine now requires PyMongo >= v3.4. Travis CI now tests against MongoDB v3.4 – v3.6 and PyMongo v3.4 – v3.6. #2017 #2066 | ||||
| - Improve performance by avoiding a call to ``to_mongo`` in ``Document.save``. #2049 | ||||
| - Connection/disconnection improvements: | ||||
|     - Expose ``mongoengine.connection.disconnect`` and ``mongoengine.connection.disconnect_all``. | ||||
|     - Fix disconnecting. #566 #1599 #605 #607 #1213 #565 | ||||
|     - Improve documentation of ``connect``/``disconnect``. | ||||
|     - Fix issue when using multiple connections to the same mongo with different credentials. #2047 | ||||
|     - ``connect`` fails immediately when db name contains invalid characters. #2031 #1718 | ||||
| - Fix the default write concern of ``Document.save`` that was overwriting the connection write concern. #568 | ||||
| - Fix querying on ``List(EmbeddedDocument)`` subclasses fields. #1961 #1492 | ||||
| - Fix querying on ``(Generic)EmbeddedDocument`` subclasses fields. #475 | ||||
| - Fix ``QuerySet.aggregate`` so that it takes limit and skip value into account. #2029 | ||||
| - Generate unique indices for ``SortedListField`` and ``EmbeddedDocumentListFields``. #2020 | ||||
| - BREAKING CHANGE: Changed the behavior of a custom field validator (i.e ``validation`` parameter of a ``Field``). It is now expected to raise a ``ValidationError`` instead of returning ``True``/``False``. #2050 | ||||
| - BREAKING CHANGES (associated with connection/disconnection fixes): | ||||
|     - Calling ``connect`` 2 times with the same alias and different parameter will raise an error (should call ``disconnect`` first). | ||||
|     - ``disconnect`` now clears ``mongoengine.connection._connection_settings``. | ||||
|     - ``disconnect`` now clears the cached attribute ``Document._collection``. | ||||
| - BREAKING CHANGE: ``EmbeddedDocument.save`` & ``.reload`` no longer exist. #1552 | ||||
|  | ||||
| Changes in 0.17.0 | ||||
| ================= | ||||
| - POTENTIAL BREAKING CHANGE: All result fields are now passed, including internal fields (``_cls``, ``_id``) when using ``QuerySet.as_pymongo``. #1976 | ||||
| - Document a BREAKING CHANGE introduced in 0.15.3 and not reported at that time. #1995 | ||||
| - DEPRECATION: ``EmbeddedDocument.save`` & ``.reload`` are marked as deprecated and will be removed in a next version of MongoEngine. #1552 | ||||
| - Fix ``QuerySet.only`` working improperly after using ``QuerySet.count`` of the same instance of a ``QuerySet``. | ||||
| - Fix ``batch_size`` that was not copied when cloning a ``QuerySet`` object. #2011 | ||||
| - Fix ``InvalidStringData`` error when using ``modify`` on a ``BinaryField``. #1127 | ||||
| - Fix test suite and CI to support MongoDB v3.4. #1445 | ||||
| - Fix reference fields querying the database on each access if value contains orphan DBRefs. | ||||
|  | ||||
| Changes in 0.16.3 | ||||
| ================= | ||||
| - Fix ``$push`` with the ``$position`` operator not working with lists in embedded documents. #1965 | ||||
|  | ||||
| Changes in 0.16.2 | ||||
| ================= | ||||
| - Fix ``Document.save`` that fails when called with ``write_concern=None`` (regression of 0.16.1). #1958 | ||||
|  | ||||
| Changes in 0.16.1 | ||||
| ================= | ||||
| - Fix ``_cls`` that is not set properly in the ``Document`` constructor (regression). #1950 | ||||
| - Fix a bug in the ``_delta`` method - update of a ``ListField`` depends on an unrelated dynamic field update. #1733 | ||||
| - Remove PyMongo's deprecated ``Collection.save`` method and use ``Collection.insert_one`` instead. #1899 | ||||
|  | ||||
| Changes in 0.16.0 | ||||
| ================= | ||||
| - POTENTIAL BREAKING CHANGES: | ||||
|     - ``EmbeddedDocumentField`` will no longer accept references to Document classes in its constructor. #1661 | ||||
|     - Get rid of the ``basecls`` parameter from the ``DictField`` constructor (dead code). #1876 | ||||
|     - Default value of the ``ComplexDateTime`` field is now ``None`` (and no longer the current datetime). #1368 | ||||
| - Fix an unhashable ``TypeError`` when referencing a ``Document`` with a compound key in an ``EmbeddedDocument``. #1685 | ||||
| - Fix a bug where an ``EmbeddedDocument`` with the same id as its parent would not be tracked for changes. #1768 | ||||
| - Fix the fact that a bulk ``QuerySet.insert`` was not setting primary keys of inserted document instances. #1919 | ||||
| - Fix a bug when referencing an abstract class in a ``ReferenceField``. #1920 | ||||
| - Allow modifications to the document made in ``pre_save_post_validation`` to be taken into account. #1202 | ||||
| - Replace MongoDB v2.4 tests in Travis CI with MongoDB v3.2. #1903 | ||||
| - Fix side effects of using ``QuerySet.no_dereference`` on other documents. #1677 | ||||
| - Fix ``TypeError`` when using lazy Django translation objects as translated choices. #1879 | ||||
| - Improve Python 2-3 codebase compatibility. #1889 | ||||
| - Fix support for changing the default value of the ``ComplexDateTime`` field. #1368 | ||||
| - Improve error message in case an ``EmbeddedDocumentListField`` receives an ``EmbeddedDocument`` instance instead of a list. #1877 | ||||
| - Fix the ``inc`` and ``dec`` operators for the ``DecimalField``. #1517 #1320 | ||||
| - Ignore ``killcursors`` queries in ``query_counter`` context manager. #1869 | ||||
| - Fix the fact that ``query_counter`` was modifying the initial profiling level in case it was != 0. #1870 | ||||
| - Repair the ``no_sub_classes`` context manager + fix the fact that it was swallowing exceptions. #1865 | ||||
| - Fix index creation error that was swallowed by ``hasattr`` under Python 2. #1688 | ||||
| - ``QuerySet.limit`` function behaviour: Passing 0 as parameter will return all the documents in the cursor. #1611 | ||||
| - Bulk insert updates the IDs of the input documents instances. #1919 | ||||
| - Fix a harmless bug related to ``GenericReferenceField`` where modifications in the generic-referenced document were tracked in the parent. #1934 | ||||
| - Improve validation of the ``BinaryField``. #273 | ||||
| - Implement lazy regex compiling in Field classes to improve ``import mongoengine`` performance. #1806 | ||||
| - Update ``GridFSProxy.__str__``  so that it would always print both the filename and grid_id. #710 | ||||
| - Add ``__repr__`` to ``Q`` and ``QCombination`` classes. #1843 | ||||
| - Fix bug in the ``BaseList.__iter__`` operator (was occuring when modifying a BaseList while iterating over it). #1676 | ||||
| - Add a ``DateField``. #513 | ||||
| - Various improvements to the documentation. | ||||
| - Various code quality improvements. | ||||
|  | ||||
| Changes in 0.15.3 | ||||
| ================= | ||||
| - ``Queryset.update/update_one`` methods now return an ``UpdateResult`` when ``full_result=True`` is provided and no longer a dict. #1491 | ||||
| - Improve ``LazyReferenceField`` and ``GenericLazyReferenceField`` with nested fields. #1704 | ||||
| - Fix the subfield resolve error in ``generic_emdedded_document`` query. #1651 #1652 | ||||
| - Use each modifier only with ``$position``. #1673 #1675 | ||||
| - Fix validation errors in the ``GenericEmbeddedDocumentField``. #1067 | ||||
| - Update cached fields when a ``fields`` argument is given. #1712 | ||||
| - Add a ``db`` parameter to ``register_connection`` for compatibility with ``connect``. | ||||
| - Use PyMongo v3.x's ``insert_one`` and ``insert_many`` in ``Document.insert``. #1491 | ||||
| - Use PyMongo v3.x's ``update_one`` and ``update_many`` in ``Document.update`` and ``QuerySet.update``. #1491 | ||||
| - Fix how ``reload(fields)`` affects changed fields. #1371 | ||||
| - Fix a bug where the read-only access to the database fails when trying to create indexes. #1338 | ||||
|  | ||||
| Changes in 0.15.0 | ||||
| ================= | ||||
| - Add ``LazyReferenceField`` and ``GenericLazyReferenceField``. #1230 | ||||
|  | ||||
| Changes in 0.14.1 | ||||
| ================= | ||||
| - Remove ``SemiStrictDict`` and start using a regular dict for ``BaseDocument._data``. #1630 | ||||
| - Add support for the ``$position`` param in the ``$push`` operator. #1566 | ||||
| - Fix ``DateTimeField`` interpreting an empty string as today. #1533 | ||||
| - Add a missing ``__ne__`` method to the ``GridFSProxy`` class. #1632 | ||||
| - Fix ``BaseQuerySet._fields_to_db_fields``. #1553 | ||||
|  | ||||
| Changes in 0.14.0 | ||||
| ================= | ||||
| - BREAKING CHANGE: Remove the ``coerce_types`` param from ``QuerySet.as_pymongo``. #1549 | ||||
| - POTENTIAL BREAKING CHANGE: Make ``EmbeddedDocument`` not hashable by default. #1528 | ||||
| - Improve code quality. #1531, #1540, #1541, #1547 | ||||
|  | ||||
| Changes in 0.13.0 | ||||
| ================= | ||||
| - POTENTIAL BREAKING CHANGE: Added Unicode support to the ``EmailField``, see docs/upgrade.rst for details. | ||||
| - POTENTIAL BREAKING CHANGE: Added Unicode support to the `EmailField`, see | ||||
|   docs/upgrade.rst for details. | ||||
|  | ||||
| Changes in 0.12.0 | ||||
| ================= | ||||
| - POTENTIAL BREAKING CHANGE: Fix ``limit``/``skip``/``hint``/``batch_size`` chaining. #1476 | ||||
| - POTENTIAL BREAKING CHANGE: Change a public ``QuerySet.clone_into`` method to a private ``QuerySet._clone_into``. #1476 | ||||
| - Fix the way ``Document.objects.create`` works with duplicate IDs. #1485 | ||||
| - Fix connecting to a replica set with PyMongo 2.x. #1436 | ||||
| - Fix using sets in field choices. #1481 | ||||
| - Fix deleting items from a ``ListField``. #1318 | ||||
| - Fix an obscure error message when filtering by ``field__in=non_iterable``. #1237 | ||||
| - Fix behavior of a ``dec`` update operator. #1450 | ||||
| - Add a ``rename`` update operator. #1454 | ||||
| - Add validation for the ``db_field`` parameter. #1448 | ||||
| - Fix the error message displayed when querying an ``EmbeddedDocumentField`` by an invalid value. #1440 | ||||
| - Fix the error message displayed when validating Unicode URLs. #1486 | ||||
| - Raise an error when trying to save an abstract document. #1449 | ||||
| - POTENTIAL BREAKING CHANGE: Fixed limit/skip/hint/batch_size chaining #1476 | ||||
| - POTENTIAL BREAKING CHANGE: Changed a public `QuerySet.clone_into` method to a private `QuerySet._clone_into` #1476 | ||||
| - Fixed the way `Document.objects.create` works with duplicate IDs #1485 | ||||
| - Fixed connecting to a replica set with PyMongo 2.x #1436 | ||||
| - Fixed using sets in field choices #1481 | ||||
| - Fixed deleting items from a `ListField` #1318 | ||||
| - Fixed an obscure error message when filtering by `field__in=non_iterable`. #1237 | ||||
| - Fixed behavior of a `dec` update operator #1450 | ||||
| - Added a `rename` update operator #1454 | ||||
| - Added validation for the `db_field` parameter #1448 | ||||
| - Fixed the error message displayed when querying an `EmbeddedDocumentField` by an invalid value #1440 | ||||
| - Fixed the error message displayed when validating unicode URLs #1486 | ||||
| - Raise an error when trying to save an abstract document #1449 | ||||
|  | ||||
| Changes in 0.11.0 | ||||
| ================= | ||||
| - BREAKING CHANGE: Rename ``ConnectionError`` to ``MongoEngineConnectionError`` since the former is a built-in exception name in Python v3.x. #1428 | ||||
| - BREAKING CHANGE: Drop Python v2.6 support. #1428 | ||||
| - BREAKING CHANGE: ``from mongoengine.base import ErrorClass`` won't work anymore for any error from ``mongoengine.errors`` (e.g. ``ValidationError``). Use ``from mongoengine.errors import ErrorClass instead``. #1428 | ||||
| - BREAKING CHANGE: Accessing a broken reference will raise a ``DoesNotExist`` error. In the past it used to return ``None``. #1334 | ||||
| - Fix absent rounding for the ``DecimalField`` when ``force_string`` is set. #1103 | ||||
| - BREAKING CHANGE: Renamed `ConnectionError` to `MongoEngineConnectionError` since the former is a built-in exception name in Python v3.x. #1428 | ||||
| - BREAKING CHANGE: Dropped Python 2.6 support. #1428 | ||||
| - BREAKING CHANGE: `from mongoengine.base import ErrorClass` won't work anymore for any error from `mongoengine.errors` (e.g. `ValidationError`). Use `from mongoengine.errors import ErrorClass instead`. #1428 | ||||
| - BREAKING CHANGE: Accessing a broken reference will raise a `DoesNotExist` error. In the past it used to return `None`. #1334 | ||||
| - Fixed absent rounding for DecimalField when `force_string` is set. #1103 | ||||
|  | ||||
| Changes in 0.10.8 | ||||
| ================= | ||||
| - Add support for ``QuerySet.batch_size``. (#1426) | ||||
| - Fix a query set iteration within an iteration. #1427 | ||||
| - Fix an issue where specifying a MongoDB URI host would override more information than it should. #1421 | ||||
| - Add an ability to filter the ``GenericReferenceField`` by an ``ObjectId`` and a ``DBRef``. #1425 | ||||
| - Fix cascading deletes for models with a custom primary key field. #1247 | ||||
| - Add ability to specify an authentication mechanism (e.g. X.509). #1333 | ||||
| - Add support for falsy primary keys (e.g. ``doc.pk = 0``). #1354 | ||||
| - Fix ``QuerySet.sum/average`` for fields w/ an explicit ``db_field``. #1417 | ||||
| - Fix filtering by ``embedded_doc=None``. #1422 | ||||
| - Add support for ``Cursor.comment``. #1420 | ||||
| - Fix ``doc.get_<field>_display`` methods. #1419 | ||||
| - Fix the ``__repr__`` method of the ``StrictDict`` #1424 | ||||
| - Add a deprecation warning for Python v2.6. | ||||
| - Added support for QuerySet.batch_size (#1426) | ||||
| - Fixed query set iteration within iteration #1427 | ||||
| - Fixed an issue where specifying a MongoDB URI host would override more information than it should #1421 | ||||
| - Added ability to filter the generic reference field by ObjectId and DBRef #1425 | ||||
| - Fixed delete cascade for models with a custom primary key field #1247 | ||||
| - Added ability to specify an authentication mechanism (e.g. X.509) #1333 | ||||
| - Added support for falsey primary keys (e.g. doc.pk = 0) #1354 | ||||
| - Fixed QuerySet#sum/average for fields w/ explicit db_field #1417 | ||||
| - Fixed filtering by embedded_doc=None #1422 | ||||
| - Added support for cursor.comment #1420 | ||||
| - Fixed doc.get_<field>_display #1419 | ||||
| - Fixed __repr__ method of the StrictDict #1424 | ||||
| - Added a deprecation warning for Python 2.6 | ||||
|  | ||||
| Changes in 0.10.7 | ||||
| ================= | ||||
| - Drop Python 3.2 support #1390 | ||||
| - Fix a bug where a dynamic doc has an index inside a dict field. #1278 | ||||
| - Fix: ``ListField`` minus index assignment does not work. #1128 | ||||
| - Fix cascade delete mixing among collections. #1224 | ||||
| - Add ``signal_kwargs`` argument to ``Document.save``, ``Document.delete`` and ``BaseQuerySet.insert`` to be passed to signals calls. #1206 | ||||
| - Raise ``OperationError`` when trying to do a ``drop_collection`` on document with no collection set. | ||||
| - Fix a bug where a count on ``ListField`` of ``EmbeddedDocumentField`` fails. #1187 | ||||
| - Fix ``LongField`` values stored as int32 in Python 3. #1253 | ||||
| - ``MapField`` now handles unicode keys correctly. #1267 | ||||
| - ``ListField`` now handles negative indicies correctly. #1270 | ||||
| - Fix an ``AttributeError`` when initializing an ``EmbeddedDocument`` with positional args. #681 | ||||
| - Fix a ``no_cursor_timeout`` error with PyMongo v3.x. #1304 | ||||
| - Replace map-reduce based ``QuerySet.sum/average`` with aggregation-based implementations. #1336 | ||||
| - Fix support for ``__`` to escape field names that match operators' names in ``update``. #1351 | ||||
| - Fix ``BaseDocument._mark_as_changed``. #1369 | ||||
| - Add support for pickling ``QuerySet`` instances. #1397 | ||||
| - Fix connecting to a list of hosts. #1389 | ||||
| - Fix a bug where accessing broken references wouldn't raise a ``DoesNotExist`` error. #1334 | ||||
| - Fix not being able to specify ``use_db_field=False`` on ``ListField(EmbeddedDocumentField)`` instances. #1218 | ||||
| - Improvements to the dictionary field's docs. #1383 | ||||
| - Dropped Python 3.2 support #1390 | ||||
| - Fixed the bug where dynamic doc has index inside a dict field #1278 | ||||
| - Fixed: ListField minus index assignment does not work #1128 | ||||
| - Fixed cascade delete mixing among collections #1224 | ||||
| - Add `signal_kwargs` argument to `Document.save`, `Document.delete` and `BaseQuerySet.insert` to be passed to signals calls #1206 | ||||
| - Raise `OperationError` when trying to do a `drop_collection` on document with no collection set. | ||||
| - count on ListField of EmbeddedDocumentField fails. #1187 | ||||
| - Fixed long fields stored as int32 in Python 3. #1253 | ||||
| - MapField now handles unicodes keys correctly. #1267 | ||||
| - ListField now handles negative indicies correctly. #1270 | ||||
| - Fixed AttributeError when initializing EmbeddedDocument with positional args. #681 | ||||
| - Fixed no_cursor_timeout error with pymongo 3.0+ #1304 | ||||
| - Replaced map-reduce based QuerySet.sum/average with aggregation-based implementations #1336 | ||||
| - Fixed support for `__` to escape field names that match operators names in `update` #1351 | ||||
| - Fixed BaseDocument#_mark_as_changed #1369 | ||||
| - Added support for pickling QuerySet instances. #1397 | ||||
| - Fixed connecting to a list of hosts #1389 | ||||
| - Fixed a bug where accessing broken references wouldn't raise a DoesNotExist error #1334 | ||||
| - Fixed not being able to specify use_db_field=False on ListField(EmbeddedDocumentField) instances #1218 | ||||
| - Improvements to the dictionary fields docs #1383 | ||||
|  | ||||
| Changes in 0.10.6 | ||||
| ================= | ||||
| - Add support for mocking MongoEngine based on mongomock. #1151 | ||||
| - Fix not being able to run tests on Windows. #1153 | ||||
| - Fixed not being able to run tests on Windows. #1153 | ||||
| - Allow creation of sparse compound indexes. #1114 | ||||
| - count on ListField of EmbeddedDocumentField fails. #1187 | ||||
|  | ||||
| Changes in 0.10.5 | ||||
| ================= | ||||
| @@ -245,12 +87,12 @@ Changes in 0.10.5 | ||||
|  | ||||
| Changes in 0.10.4 | ||||
| ================= | ||||
| - ``SaveConditionError`` is now importable from the top level package. #1165 | ||||
| - Add a ``QuerySet.upsert_one`` method. #1157 | ||||
| - SaveConditionError is now importable from the top level package. #1165 | ||||
| - upsert_one method added. #1157 | ||||
|  | ||||
| Changes in 0.10.3 | ||||
| ================= | ||||
| - Fix ``read_preference`` (it had chaining issues with PyMongo v2.x and it didn't work at all with PyMongo v3.x). #1042 | ||||
| - Fix `read_preference` (it had chaining issues with PyMongo 2.x and it didn't work at all with PyMongo 3.x) #1042 | ||||
|  | ||||
| Changes in 0.10.2 | ||||
| ================= | ||||
| @@ -260,16 +102,16 @@ Changes in 0.10.2 | ||||
|  | ||||
| Changes in 0.10.1 | ||||
| ================= | ||||
| - Fix infinite recursion with cascade delete rules under specific conditions. #1046 | ||||
| - Fix ``CachedReferenceField`` bug when loading cached docs as ``DBRef`` but failing to save them. #1047 | ||||
| - Fix ignored chained options. #842 | ||||
| - ``Document.save``'s ``save_condition`` error raises a ``SaveConditionError`` exception. #1070 | ||||
| - Fix ``Document.reload`` for the ``DynamicDocument``. #1050 | ||||
| - ``StrictDict`` & ``SemiStrictDict`` are shadowed at init time. #1105 | ||||
| - Fix ``ListField`` negative index assignment not working. #1119 | ||||
| - Remove code that marks a field as changed when the field has a default value but does not exist in the database. #1126 | ||||
| - Remove test dependencies (nose and rednose) from install dependencies. #1079 | ||||
| - Recursively build a query when using the ``elemMatch`` operator. #1130 | ||||
| - Fix infinite recursion with CASCADE delete rules under specific conditions. #1046 | ||||
| - Fix CachedReferenceField bug when loading cached docs as DBRef but failing to save them. #1047 | ||||
| - Fix ignored chained options #842 | ||||
| - Document save's save_condition error raises `SaveConditionError` exception #1070 | ||||
| - Fix Document.reload for DynamicDocument. #1050 | ||||
| - StrictDict & SemiStrictDict are shadowed at init time. #1105 | ||||
| - Fix ListField minus index assignment does not work. #1119 | ||||
| - Remove code that marks field as changed when the field has default but not existed in database #1126 | ||||
| - Remove test dependencies (nose and rednose) from install dependencies list. #1079 | ||||
| - Recursively build query when using elemMatch operator. #1130 | ||||
| - Fix instance back references for lists of embedded documents. #1131 | ||||
|  | ||||
| Changes in 0.10.0 | ||||
| @@ -280,7 +122,7 @@ Changes in 0.10.0 | ||||
| - Removed get_or_create() deprecated since 0.8.0. #300 | ||||
| - Improve Document._created status when switch collection and db #1020 | ||||
| - Queryset update doesn't go through field validation #453 | ||||
| - Added support for specifying authentication source as option ``authSource`` in URI. #967 | ||||
| - Added support for specifying authentication source as option `authSource` in URI. #967 | ||||
| - Fixed mark_as_changed to handle higher/lower level fields changed. #927 | ||||
| - ListField of embedded docs doesn't set the _instance attribute when iterating over it #914 | ||||
| - Support += and *= for ListField #595 | ||||
| @@ -296,7 +138,7 @@ Changes in 0.10.0 | ||||
| - Fixes some internal _id handling issue. #961 | ||||
| - Updated URL and Email Field regex validators, added schemes argument to URLField validation. #652 | ||||
| - Capped collection multiple of 256. #1011 | ||||
| - Added ``BaseQuerySet.aggregate_sum`` and ``BaseQuerySet.aggregate_average`` methods. | ||||
| - Added `BaseQuerySet.aggregate_sum` and `BaseQuerySet.aggregate_average` methods. | ||||
| - Fix for delete with write_concern {'w': 0}. #1008 | ||||
| - Allow dynamic lookup for more than two parts. #882 | ||||
| - Added support for min_distance on geo queries. #831 | ||||
| @@ -305,10 +147,10 @@ Changes in 0.10.0 | ||||
| Changes in 0.9.0 | ||||
| ================ | ||||
| - Update FileField when creating a new file #714 | ||||
| - Added ``EmbeddedDocumentListField`` for Lists of Embedded Documents. #826 | ||||
| - Added `EmbeddedDocumentListField` for Lists of Embedded Documents. #826 | ||||
| - ComplexDateTimeField should fall back to None when null=True #864 | ||||
| - Request Support for $min, $max Field update operators #863 | ||||
| - ``BaseDict`` does not follow ``setdefault`` #866 | ||||
| - `BaseDict` does not follow `setdefault` #866 | ||||
| - Add support for $type operator # 766 | ||||
| - Fix tests for pymongo 2.8+ #877 | ||||
| - No module named 'django.utils.importlib' (Django dev) #872 | ||||
| @@ -329,13 +171,13 @@ Changes in 0.9.0 | ||||
| - Stop ensure_indexes running on a secondaries unless connection is through mongos #746 | ||||
| - Not overriding default values when loading a subset of fields #399 | ||||
| - Saving document doesn't create new fields in existing collection #620 | ||||
| - Added ``Queryset.aggregate`` wrapper to aggregation framework #703 | ||||
| - Added `Queryset.aggregate` wrapper to aggregation framework #703 | ||||
| - Added support to show original model fields on to_json calls instead of db_field #697 | ||||
| - Added Queryset.search_text to Text indexes searches #700 | ||||
| - Fixed tests for Django 1.7 #696 | ||||
| - Follow ReferenceFields in EmbeddedDocuments with select_related #690 | ||||
| - Added preliminary support for text indexes #680 | ||||
| - Added ``elemMatch`` operator as well - ``match`` is too obscure #653 | ||||
| - Added `elemMatch` operator as well - `match` is too obscure #653 | ||||
| - Added support for progressive JPEG #486 #548 | ||||
| - Allow strings to be used in index creation #675 | ||||
| - Fixed EmbeddedDoc weakref proxy issue #592 | ||||
| @@ -371,11 +213,11 @@ Changes in 0.9.0 | ||||
| - Increase email field length to accommodate new TLDs #726 | ||||
| - index_cls is ignored when deciding to set _cls as index prefix #733 | ||||
| - Make 'db' argument to connection optional #737 | ||||
| - Allow atomic update for the entire ``DictField`` #742 | ||||
| - Allow atomic update for the entire `DictField` #742 | ||||
| - Added MultiPointField, MultiLineField, MultiPolygonField | ||||
| - Fix multiple connections aliases being rewritten #748 | ||||
| - Fixed a few instances where reverse_delete_rule was written as reverse_delete_rules. #791 | ||||
| - Make ``in_bulk()`` respect ``no_dereference()`` #775 | ||||
| - Make `in_bulk()` respect `no_dereference()` #775 | ||||
| - Handle None from model __str__; Fixes #753 #754 | ||||
| - _get_changed_fields fix for embedded documents with id field. #925 | ||||
|  | ||||
| @@ -429,18 +271,18 @@ Changes in 0.8.4 | ||||
|  | ||||
| Changes in 0.8.3 | ||||
| ================ | ||||
| - Fixed EmbeddedDocuments with ``id`` also storing ``_id`` (#402) | ||||
| - Fixed EmbeddedDocuments with `id` also storing `_id` (#402) | ||||
| - Added get_proxy_object helper to filefields (#391) | ||||
| - Added QuerySetNoCache and QuerySet.no_cache() for lower memory consumption (#365) | ||||
| - Fixed sum and average mapreduce dot notation support (#375, #376, #393) | ||||
| - Fixed as_pymongo to return the id (#386) | ||||
| - Document.select_related() now respects ``db_alias`` (#377) | ||||
| - Document.select_related() now respects `db_alias` (#377) | ||||
| - Reload uses shard_key if applicable (#384) | ||||
| - Dynamic fields are ordered based on creation and stored in _fields_ordered (#396) | ||||
|  | ||||
|   **Potential breaking change:** http://docs.mongoengine.org/en/latest/upgrade.html#to-0-8-3 | ||||
|  | ||||
| - Fixed pickling dynamic documents ``_dynamic_fields`` (#387) | ||||
| - Fixed pickling dynamic documents `_dynamic_fields` (#387) | ||||
| - Fixed ListField setslice and delslice dirty tracking (#390) | ||||
| - Added Django 1.5 PY3 support (#392) | ||||
| - Added match ($elemMatch) support for EmbeddedDocuments (#379) | ||||
| @@ -481,7 +323,7 @@ Changes in 0.8.0 | ||||
| ================ | ||||
| - Fixed querying ReferenceField custom_id (#317) | ||||
| - Fixed pickle issues with collections (#316) | ||||
| - Added ``get_next_value`` preview for SequenceFields (#319) | ||||
| - Added `get_next_value` preview for SequenceFields (#319) | ||||
| - Added no_sub_classes context manager and queryset helper (#312) | ||||
| - Querysets now utilises a local cache | ||||
| - Changed __len__ behaviour in the queryset (#247, #311) | ||||
| @@ -510,7 +352,7 @@ Changes in 0.8.0 | ||||
| - Updated connection to use MongoClient (#262, #274) | ||||
| - Fixed db_alias and inherited Documents (#143) | ||||
| - Documentation update for document errors (#124) | ||||
| - Deprecated ``get_or_create`` (#35) | ||||
| - Deprecated `get_or_create` (#35) | ||||
| - Updated inheritable objects created by upsert now contain _cls (#118) | ||||
| - Added support for creating documents with embedded documents in a single operation (#6) | ||||
| - Added to_json and from_json to Document (#1) | ||||
| @@ -631,7 +473,7 @@ Changes in 0.7.0 | ||||
| - Fixed UnboundLocalError in composite index with pk field (#88) | ||||
| - Updated ReferenceField's to optionally store ObjectId strings | ||||
|   this will become the default in 0.8 (#89) | ||||
| - Added FutureWarning - save will default to ``cascade=False`` in 0.8 | ||||
| - Added FutureWarning - save will default to `cascade=False` in 0.8 | ||||
| - Added example of indexing embedded document fields (#75) | ||||
| - Fixed ImageField resizing when forcing size (#80) | ||||
| - Add flexibility for fields handling bad data (#78) | ||||
| @@ -727,7 +569,7 @@ Changes in 0.6.8 | ||||
| ================ | ||||
| - Fixed FileField losing reference when no default set | ||||
| - Removed possible race condition from FileField (grid_file) | ||||
| - Added assignment to save, can now do: ``b = MyDoc(**kwargs).save()`` | ||||
| - Added assignment to save, can now do: `b = MyDoc(**kwargs).save()` | ||||
| - Added support for pull operations on nested EmbeddedDocuments | ||||
| - Added support for choices with GenericReferenceFields | ||||
| - Added support for choices with GenericEmbeddedDocumentFields | ||||
| @@ -742,7 +584,7 @@ Changes in 0.6.7 | ||||
| - Fixed indexing on '_id' or 'pk' or 'id' | ||||
| - Invalid data from the DB now raises an InvalidDocumentError | ||||
| - Cleaned up the Validation Error - docs and code | ||||
| - Added meta ``auto_create_index`` so you can disable index creation | ||||
| - Added meta `auto_create_index` so you can disable index creation | ||||
| - Added write concern options to inserts | ||||
| - Fixed typo in meta for index options | ||||
| - Bug fix Read preference now passed correctly | ||||
| @@ -783,6 +625,7 @@ Changes in 0.6.1 | ||||
|  | ||||
| Changes in 0.6 | ||||
| ============== | ||||
|  | ||||
| - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7 | ||||
| - Added support for covered indexes when inheritance is off | ||||
| - No longer always upsert on save for items with a '_id' | ||||
| @@ -1007,6 +850,7 @@ Changes in v0.1.3 | ||||
|   querying takes place | ||||
| - A few minor bugfixes | ||||
|  | ||||
|  | ||||
| Changes in v0.1.2 | ||||
| ================= | ||||
| - Query values may be processed before being used in queries | ||||
| @@ -1015,6 +859,7 @@ Changes in v0.1.2 | ||||
| - Added ``BooleanField`` | ||||
| - Added ``Document.reload()`` method | ||||
|  | ||||
|  | ||||
| Changes in v0.1.1 | ||||
| ================= | ||||
| - Documents may now use capped collections | ||||
|   | ||||
| @@ -1,19 +1,16 @@ | ||||
| from mongoengine import * | ||||
|  | ||||
| connect("tumblelog") | ||||
|  | ||||
| connect('tumblelog') | ||||
|  | ||||
| class Comment(EmbeddedDocument): | ||||
|     content = StringField() | ||||
|     name = StringField(max_length=120) | ||||
|  | ||||
|  | ||||
| class User(Document): | ||||
|     email = StringField(required=True) | ||||
|     first_name = StringField(max_length=50) | ||||
|     last_name = StringField(max_length=50) | ||||
|  | ||||
|  | ||||
| class Post(Document): | ||||
|     title = StringField(max_length=120, required=True) | ||||
|     author = ReferenceField(User) | ||||
| @@ -21,57 +18,54 @@ class Post(Document): | ||||
|     comments = ListField(EmbeddedDocumentField(Comment)) | ||||
|  | ||||
|     # bugfix | ||||
|     meta = {"allow_inheritance": True} | ||||
|     meta = {'allow_inheritance': True} | ||||
|  | ||||
|  | ||||
| class TextPost(Post): | ||||
|     content = StringField() | ||||
|  | ||||
|  | ||||
| class ImagePost(Post): | ||||
|     image_path = StringField() | ||||
|  | ||||
|  | ||||
| class LinkPost(Post): | ||||
|     link_url = StringField() | ||||
|  | ||||
|  | ||||
| Post.drop_collection() | ||||
|  | ||||
| john = User(email="jdoe@example.com", first_name="John", last_name="Doe") | ||||
| john = User(email='jdoe@example.com', first_name='John', last_name='Doe') | ||||
| john.save() | ||||
|  | ||||
| post1 = TextPost(title="Fun with MongoEngine", author=john) | ||||
| post1.content = "Took a look at MongoEngine today, looks pretty cool." | ||||
| post1.tags = ["mongodb", "mongoengine"] | ||||
| post1 = TextPost(title='Fun with MongoEngine', author=john) | ||||
| post1.content = 'Took a look at MongoEngine today, looks pretty cool.' | ||||
| post1.tags = ['mongodb', 'mongoengine'] | ||||
| post1.save() | ||||
|  | ||||
| post2 = LinkPost(title="MongoEngine Documentation", author=john) | ||||
| post2.link_url = "http://tractiondigital.com/labs/mongoengine/docs" | ||||
| post2.tags = ["mongoengine"] | ||||
| post2 = LinkPost(title='MongoEngine Documentation', author=john) | ||||
| post2.link_url = 'http://tractiondigital.com/labs/mongoengine/docs' | ||||
| post2.tags = ['mongoengine'] | ||||
| post2.save() | ||||
|  | ||||
| print("ALL POSTS") | ||||
| print() | ||||
| print 'ALL POSTS' | ||||
| print | ||||
| for post in Post.objects: | ||||
|     print(post.title) | ||||
|     # print '=' * post.title.count() | ||||
|     print("=" * 20) | ||||
|     print post.title | ||||
|     #print '=' * post.title.count() | ||||
|     print "=" * 20 | ||||
|  | ||||
|     if isinstance(post, TextPost): | ||||
|         print(post.content) | ||||
|         print post.content | ||||
|  | ||||
|     if isinstance(post, LinkPost): | ||||
|         print("Link:", post.link_url) | ||||
|         print 'Link:', post.link_url | ||||
|  | ||||
|     print() | ||||
| print() | ||||
|     print | ||||
| print | ||||
|  | ||||
| print("POSTS TAGGED 'MONGODB'") | ||||
| print() | ||||
| for post in Post.objects(tags="mongodb"): | ||||
|     print(post.title) | ||||
| print() | ||||
| print 'POSTS TAGGED \'MONGODB\'' | ||||
| print | ||||
| for post in Post.objects(tags='mongodb'): | ||||
|     print post.title | ||||
| print | ||||
|  | ||||
| num_posts = Post.objects(tags="mongodb").count() | ||||
| print('Found %d posts with tag "mongodb"' % num_posts) | ||||
| num_posts = Post.objects(tags='mongodb').count() | ||||
| print 'Found %d posts with tag "mongodb"' % num_posts | ||||
|   | ||||
							
								
								
									
										96
									
								
								docs/conf.py
									
									
									
									
									
								
							
							
						
						
									
										96
									
								
								docs/conf.py
									
									
									
									
									
								
							| @@ -11,8 +11,7 @@ | ||||
| # All configuration values have a default; values that are commented out | ||||
| # serve to show the default. | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| import sys, os | ||||
|  | ||||
| import sphinx_rtd_theme | ||||
|  | ||||
| @@ -21,29 +20,29 @@ import mongoengine | ||||
| # If extensions (or modules to document with autodoc) are in another directory, | ||||
| # add these directories to sys.path here. If the directory is relative to the | ||||
| # documentation root, use os.path.abspath to make it absolute, like shown here. | ||||
| sys.path.insert(0, os.path.abspath("..")) | ||||
| sys.path.insert(0, os.path.abspath('..')) | ||||
|  | ||||
| # -- General configuration ----------------------------------------------------- | ||||
|  | ||||
| # Add any Sphinx extension module names here, as strings. They can be extensions | ||||
| # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. | ||||
| extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo"] | ||||
| extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo'] | ||||
|  | ||||
| # Add any paths that contain templates here, relative to this directory. | ||||
| templates_path = ["_templates"] | ||||
| templates_path = ['_templates'] | ||||
|  | ||||
| # The suffix of source filenames. | ||||
| source_suffix = ".rst" | ||||
| source_suffix = '.rst' | ||||
|  | ||||
| # The encoding of source files. | ||||
| # source_encoding = 'utf-8' | ||||
| #source_encoding = 'utf-8' | ||||
|  | ||||
| # The master toctree document. | ||||
| master_doc = "index" | ||||
| master_doc = 'index' | ||||
|  | ||||
| # General information about the project. | ||||
| project = u"MongoEngine" | ||||
| copyright = u"2009, MongoEngine Authors" | ||||
| project = u'MongoEngine' | ||||
| copyright = u'2009, MongoEngine Authors' | ||||
|  | ||||
| # The version info for the project you're documenting, acts as replacement for | ||||
| # |version| and |release|, also used in various other places throughout the | ||||
| @@ -56,66 +55,68 @@ release = mongoengine.get_version() | ||||
|  | ||||
| # The language for content autogenerated by Sphinx. Refer to documentation | ||||
| # for a list of supported languages. | ||||
| # language = None | ||||
| #language = None | ||||
|  | ||||
| # There are two options for replacing |today|: either, you set today to some | ||||
| # non-false value, then it is used: | ||||
| # today = '' | ||||
| #today = '' | ||||
| # Else, today_fmt is used as the format for a strftime call. | ||||
| # today_fmt = '%B %d, %Y' | ||||
| #today_fmt = '%B %d, %Y' | ||||
|  | ||||
| # List of documents that shouldn't be included in the build. | ||||
| # unused_docs = [] | ||||
| #unused_docs = [] | ||||
|  | ||||
| # List of directories, relative to source directory, that shouldn't be searched | ||||
| # for source files. | ||||
| exclude_trees = ["_build"] | ||||
| exclude_trees = ['_build'] | ||||
|  | ||||
| # The reST default role (used for this markup: `text`) to use for all documents. | ||||
| # default_role = None | ||||
| #default_role = None | ||||
|  | ||||
| # If true, '()' will be appended to :func: etc. cross-reference text. | ||||
| # add_function_parentheses = True | ||||
| #add_function_parentheses = True | ||||
|  | ||||
| # If true, the current module name will be prepended to all description | ||||
| # unit titles (such as .. function::). | ||||
| # add_module_names = True | ||||
| #add_module_names = True | ||||
|  | ||||
| # If true, sectionauthor and moduleauthor directives will be shown in the | ||||
| # output. They are ignored by default. | ||||
| # show_authors = False | ||||
| #show_authors = False | ||||
|  | ||||
| # The name of the Pygments (syntax highlighting) style to use. | ||||
| pygments_style = "sphinx" | ||||
| pygments_style = 'sphinx' | ||||
|  | ||||
| # A list of ignored prefixes for module index sorting. | ||||
| # modindex_common_prefix = [] | ||||
| #modindex_common_prefix = [] | ||||
|  | ||||
|  | ||||
| # -- Options for HTML output --------------------------------------------------- | ||||
|  | ||||
| # The theme to use for HTML and HTML Help pages.  Major themes that come with | ||||
| # Sphinx are currently 'default' and 'sphinxdoc'. | ||||
| html_theme = "sphinx_rtd_theme" | ||||
| html_theme = 'sphinx_rtd_theme' | ||||
|  | ||||
| # Theme options are theme-specific and customize the look and feel of a theme | ||||
| # further.  For a list of options available for each theme, see the | ||||
| # documentation. | ||||
| html_theme_options = {"canonical_url": "http://docs.mongoengine.org/en/latest/"} | ||||
| html_theme_options = { | ||||
|     'canonical_url': 'http://docs.mongoengine.org/en/latest/' | ||||
| } | ||||
|  | ||||
| # Add any paths that contain custom themes here, relative to this directory. | ||||
| html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] | ||||
|  | ||||
| # The name for this set of Sphinx documents.  If None, it defaults to | ||||
| # "<project> v<release> documentation". | ||||
| # html_title = None | ||||
| #html_title = None | ||||
|  | ||||
| # A shorter title for the navigation bar.  Default is the same as html_title. | ||||
| # html_short_title = None | ||||
| #html_short_title = None | ||||
|  | ||||
| # The name of an image file (relative to this directory) to place at the top | ||||
| # of the sidebar. | ||||
| # html_logo = None | ||||
| #html_logo = None | ||||
|  | ||||
| # The name of an image file (within the static path) to use as favicon of the | ||||
| # docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32 | ||||
| @@ -125,11 +126,11 @@ html_favicon = "favicon.ico" | ||||
| # Add any paths that contain custom static files (such as style sheets) here, | ||||
| # relative to this directory. They are copied after the builtin static files, | ||||
| # so a file named "default.css" will overwrite the builtin "default.css". | ||||
| # html_static_path = ['_static'] | ||||
| #html_static_path = ['_static'] | ||||
|  | ||||
| # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, | ||||
| # using the given strftime format. | ||||
| # html_last_updated_fmt = '%b %d, %Y' | ||||
| #html_last_updated_fmt = '%b %d, %Y' | ||||
|  | ||||
| # If true, SmartyPants will be used to convert quotes and dashes to | ||||
| # typographically correct entities. | ||||
| @@ -137,68 +138,69 @@ html_use_smartypants = True | ||||
|  | ||||
| # Custom sidebar templates, maps document names to template names. | ||||
| html_sidebars = { | ||||
|     "index": ["globaltoc.html", "searchbox.html"], | ||||
|     "**": ["localtoc.html", "relations.html", "searchbox.html"], | ||||
|     'index': ['globaltoc.html', 'searchbox.html'], | ||||
|     '**': ['localtoc.html', 'relations.html', 'searchbox.html'] | ||||
| } | ||||
|  | ||||
|  | ||||
| # Additional templates that should be rendered to pages, maps page names to | ||||
| # template names. | ||||
| # html_additional_pages = {} | ||||
| #html_additional_pages = {} | ||||
|  | ||||
| # If false, no module index is generated. | ||||
| # html_use_modindex = True | ||||
| #html_use_modindex = True | ||||
|  | ||||
| # If false, no index is generated. | ||||
| # html_use_index = True | ||||
| #html_use_index = True | ||||
|  | ||||
| # If true, the index is split into individual pages for each letter. | ||||
| # html_split_index = False | ||||
| #html_split_index = False | ||||
|  | ||||
| # If true, links to the reST sources are added to the pages. | ||||
| # html_show_sourcelink = True | ||||
| #html_show_sourcelink = True | ||||
|  | ||||
| # If true, an OpenSearch description file will be output, and all pages will | ||||
| # contain a <link> tag referring to it.  The value of this option must be the | ||||
| # base URL from which the finished HTML is served. | ||||
| # html_use_opensearch = '' | ||||
| #html_use_opensearch = '' | ||||
|  | ||||
| # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). | ||||
| # html_file_suffix = '' | ||||
| #html_file_suffix = '' | ||||
|  | ||||
| # Output file base name for HTML help builder. | ||||
| htmlhelp_basename = "MongoEnginedoc" | ||||
| htmlhelp_basename = 'MongoEnginedoc' | ||||
|  | ||||
|  | ||||
| # -- Options for LaTeX output -------------------------------------------------- | ||||
|  | ||||
| # The paper size ('letter' or 'a4'). | ||||
| latex_paper_size = "a4" | ||||
| latex_paper_size = 'a4' | ||||
|  | ||||
| # The font size ('10pt', '11pt' or '12pt'). | ||||
| # latex_font_size = '10pt' | ||||
| #latex_font_size = '10pt' | ||||
|  | ||||
| # Grouping the document tree into LaTeX files. List of tuples | ||||
| # (source start file, target name, title, author, documentclass [howto/manual]). | ||||
| latex_documents = [ | ||||
|     ("index", "MongoEngine.tex", "MongoEngine Documentation", "Ross Lawley", "manual") | ||||
|   ('index', 'MongoEngine.tex', 'MongoEngine Documentation', | ||||
|    'Ross Lawley', 'manual'), | ||||
| ] | ||||
|  | ||||
| # The name of an image file (relative to this directory) to place at the top of | ||||
| # the title page. | ||||
| # latex_logo = None | ||||
| #latex_logo = None | ||||
|  | ||||
| # For "manual" documents, if this is true, then toplevel headings are parts, | ||||
| # not chapters. | ||||
| # latex_use_parts = False | ||||
| #latex_use_parts = False | ||||
|  | ||||
| # Additional stuff for the LaTeX preamble. | ||||
| # latex_preamble = '' | ||||
| #latex_preamble = '' | ||||
|  | ||||
| # Documents to append as an appendix to all manuals. | ||||
| # latex_appendices = [] | ||||
| #latex_appendices = [] | ||||
|  | ||||
| # If false, no module index is generated. | ||||
| # latex_use_modindex = True | ||||
| #latex_use_modindex = True | ||||
|  | ||||
| autoclass_content = "both" | ||||
| autoclass_content = 'both' | ||||
|   | ||||
							
								
								
									
										13
									
								
								docs/faq.rst
									
									
									
									
									
								
							
							
						
						
									
										13
									
								
								docs/faq.rst
									
									
									
									
									
								
							| @@ -1,13 +0,0 @@ | ||||
| ========================== | ||||
| Frequently Asked Questions | ||||
| ========================== | ||||
|  | ||||
| Does MongoEngine support asynchronous drivers (Motor, TxMongo)? | ||||
| --------------------------------------------------------------- | ||||
|  | ||||
| No, MongoEngine is exclusively based on PyMongo and isn't designed to support other drivers. | ||||
| If this is a requirement for your project, check the alternatives: `uMongo`_ and `MotorEngine`_. | ||||
|  | ||||
| .. _uMongo: https://umongo.readthedocs.io/ | ||||
| .. _MotorEngine: https://motorengine.readthedocs.io/ | ||||
|  | ||||
| @@ -4,11 +4,9 @@ | ||||
| Connecting to MongoDB | ||||
| ===================== | ||||
|  | ||||
| Connections in MongoEngine are registered globally and are identified with aliases. | ||||
| If no `alias` is provided during the connection, it will use "default" as alias. | ||||
|  | ||||
| To connect to a running instance of :program:`mongod`, use the :func:`~mongoengine.connect` | ||||
| function. The first argument is the name of the database to connect to:: | ||||
| To connect to a running instance of :program:`mongod`, use the | ||||
| :func:`~mongoengine.connect` function. The first argument is the name of the | ||||
| database to connect to:: | ||||
|  | ||||
|     from mongoengine import connect | ||||
|     connect('project1') | ||||
| @@ -20,10 +18,10 @@ provide the :attr:`host` and :attr:`port` arguments to | ||||
|  | ||||
|     connect('project1', host='192.168.1.35', port=12345) | ||||
|  | ||||
| If the database requires authentication, :attr:`username`, :attr:`password` | ||||
| and :attr:`authentication_source` arguments should be provided:: | ||||
| If the database requires authentication, :attr:`username` and :attr:`password` | ||||
| arguments should be provided:: | ||||
|  | ||||
|     connect('project1', username='webapp', password='pwd123', authentication_source='admin') | ||||
|     connect('project1', username='webapp', password='pwd123') | ||||
|  | ||||
| URI style connections are also supported -- just supply the URI as | ||||
| the :attr:`host` to | ||||
| @@ -44,9 +42,6 @@ the :attr:`host` to | ||||
|     will establish connection to ``production`` database using | ||||
|     ``admin`` username and ``qwerty`` password. | ||||
|  | ||||
| .. note:: Calling :func:`~mongoengine.connect` without argument will establish | ||||
|     a connection to the "test" database by default | ||||
|  | ||||
| Replica Sets | ||||
| ============ | ||||
|  | ||||
| @@ -76,61 +71,28 @@ is used. | ||||
| In the background this uses :func:`~mongoengine.register_connection` to | ||||
| store the data and you can register all aliases up front if required. | ||||
|  | ||||
| Documents defined in different database | ||||
| --------------------------------------- | ||||
| Individual documents can be attached to different databases by providing a | ||||
| Individual documents can also support multiple databases by providing a | ||||
| `db_alias` in their meta data. This allows :class:`~pymongo.dbref.DBRef` | ||||
| objects to point across databases and collections. Below is an example schema, | ||||
| using 3 different databases to store data:: | ||||
|  | ||||
|         connect(alias='user-db-alias', db='user-db') | ||||
|         connect(alias='book-db-alias', db='book-db') | ||||
|         connect(alias='users-books-db-alias', db='users-books-db') | ||||
|          | ||||
|         class User(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|             meta = {'db_alias': 'user-db-alias'} | ||||
|             meta = {'db_alias': 'user-db'} | ||||
|  | ||||
|         class Book(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|             meta = {'db_alias': 'book-db-alias'} | ||||
|             meta = {'db_alias': 'book-db'} | ||||
|  | ||||
|         class AuthorBooks(Document): | ||||
|             author = ReferenceField(User) | ||||
|             book = ReferenceField(Book) | ||||
|  | ||||
|             meta = {'db_alias': 'users-books-db-alias'} | ||||
|             meta = {'db_alias': 'users-books-db'} | ||||
|  | ||||
|  | ||||
| Disconnecting an existing connection | ||||
| ------------------------------------ | ||||
| The function :func:`~mongoengine.disconnect` can be used to | ||||
| disconnect a particular connection. This can be used to change a | ||||
| connection globally:: | ||||
|  | ||||
|         from mongoengine import connect, disconnect | ||||
|         connect('a_db', alias='db1') | ||||
|  | ||||
|         class User(Document): | ||||
|             name = StringField() | ||||
|             meta = {'db_alias': 'db1'} | ||||
|  | ||||
|         disconnect(alias='db1') | ||||
|  | ||||
|         connect('another_db', alias='db1') | ||||
|  | ||||
| .. note:: Calling :func:`~mongoengine.disconnect` without argument | ||||
|     will disconnect the "default" connection | ||||
|  | ||||
| .. note:: Since connections gets registered globally, it is important | ||||
|     to use the `disconnect` function from MongoEngine and not the | ||||
|     `disconnect()` method of an existing connection (pymongo.MongoClient) | ||||
|  | ||||
| .. note:: :class:`~mongoengine.Document` are caching the pymongo collection. | ||||
|     using `disconnect` ensures that it gets cleaned as well | ||||
|  | ||||
| Context Managers | ||||
| ================ | ||||
| Sometimes you may want to switch the database or collection to query against. | ||||
| @@ -157,7 +119,7 @@ access to the same User document across databases:: | ||||
|  | ||||
| Switch Collection | ||||
| ----------------- | ||||
| The :func:`~mongoengine.context_managers.switch_collection` context manager | ||||
| The :class:`~mongoengine.context_managers.switch_collection` context manager | ||||
| allows you to change the collection for a given class allowing quick and easy | ||||
| access to the same Group document across collections:: | ||||
|  | ||||
|   | ||||
| @@ -22,7 +22,7 @@ objects** as class attributes to the document class:: | ||||
|  | ||||
|     class Page(Document): | ||||
|         title = StringField(max_length=200, required=True) | ||||
|         date_modified = DateTimeField(default=datetime.datetime.utcnow) | ||||
|         date_modified = DateTimeField(default=datetime.datetime.now) | ||||
|  | ||||
| As BSON (the binary format for storing data in mongodb) is order dependent, | ||||
| documents are serialized based on their field order. | ||||
| @@ -80,16 +80,13 @@ are as follows: | ||||
| * :class:`~mongoengine.fields.FloatField` | ||||
| * :class:`~mongoengine.fields.GenericEmbeddedDocumentField` | ||||
| * :class:`~mongoengine.fields.GenericReferenceField` | ||||
| * :class:`~mongoengine.fields.GenericLazyReferenceField` | ||||
| * :class:`~mongoengine.fields.GeoPointField` | ||||
| * :class:`~mongoengine.fields.ImageField` | ||||
| * :class:`~mongoengine.fields.IntField` | ||||
| * :class:`~mongoengine.fields.ListField` | ||||
| * :class:`~mongoengine.fields.LongField` | ||||
| * :class:`~mongoengine.fields.MapField` | ||||
| * :class:`~mongoengine.fields.ObjectIdField` | ||||
| * :class:`~mongoengine.fields.ReferenceField` | ||||
| * :class:`~mongoengine.fields.LazyReferenceField` | ||||
| * :class:`~mongoengine.fields.SequenceField` | ||||
| * :class:`~mongoengine.fields.SortedListField` | ||||
| * :class:`~mongoengine.fields.StringField` | ||||
| @@ -156,7 +153,7 @@ arguments can be set on all fields: | ||||
|     An iterable (e.g. list, tuple or set) of choices to which the value of this | ||||
|     field should be limited. | ||||
|  | ||||
|     Can either be nested tuples of value (stored in mongo) and a | ||||
|     Can be either be a nested tuples of value (stored in mongo) and a | ||||
|     human readable key :: | ||||
|  | ||||
|         SIZE = (('S', 'Small'), | ||||
| @@ -176,21 +173,6 @@ arguments can be set on all fields: | ||||
|         class Shirt(Document): | ||||
|             size = StringField(max_length=3, choices=SIZE) | ||||
|  | ||||
| :attr:`validation` (Optional) | ||||
|     A callable to validate the value of the field. | ||||
|     The callable takes the value as parameter and should raise a ValidationError | ||||
|     if validation fails | ||||
|  | ||||
|     e.g :: | ||||
|  | ||||
|         def _not_empty(val): | ||||
|             if not val: | ||||
|                 raise ValidationError('value can not be empty') | ||||
|  | ||||
|         class Person(Document): | ||||
|             name = StringField(validation=_not_empty) | ||||
|  | ||||
|  | ||||
| :attr:`**kwargs` (Optional) | ||||
|     You can supply additional metadata as arbitrary additional keyword | ||||
|     arguments.  You can not override existing attributes, however.  Common | ||||
| @@ -242,7 +224,7 @@ store; in this situation a :class:`~mongoengine.fields.DictField` is appropriate | ||||
|         user = ReferenceField(User) | ||||
|         answers = DictField() | ||||
|  | ||||
|     survey_response = SurveyResponse(date=datetime.utcnow(), user=request.user) | ||||
|     survey_response = SurveyResponse(date=datetime.now(), user=request.user) | ||||
|     response_form = ResponseForm(request.POST) | ||||
|     survey_response.answers = response_form.cleaned_data() | ||||
|     survey_response.save() | ||||
| @@ -352,7 +334,7 @@ Its value can take any of the following constants: | ||||
|   Deletion is denied if there still exist references to the object being | ||||
|   deleted. | ||||
| :const:`mongoengine.NULLIFY` | ||||
|   Any object's fields still referring to the object being deleted are set to None | ||||
|   Any object's fields still referring to the object being deleted are removed | ||||
|   (using MongoDB's "unset" operation), effectively nullifying the relationship. | ||||
| :const:`mongoengine.CASCADE` | ||||
|   Any object containing fields that are referring to the object being deleted | ||||
| @@ -508,9 +490,7 @@ the field name with a **#**:: | ||||
|             ] | ||||
|         } | ||||
|  | ||||
| If a dictionary is passed then additional options become available. Valid options include, | ||||
| but are not limited to: | ||||
|  | ||||
| If a dictionary is passed then the following options are available: | ||||
|  | ||||
| :attr:`fields` (Default: None) | ||||
|     The fields to index. Specified in the same format as described above. | ||||
| @@ -531,15 +511,8 @@ but are not limited to: | ||||
|     Allows you to automatically expire data from a collection by setting the | ||||
|     time in seconds to expire a field. | ||||
|  | ||||
| :attr:`name` (Optional) | ||||
|     Allows you to specify a name for the index | ||||
|  | ||||
| :attr:`collation` (Optional) | ||||
|     Allows to create case insensitive indexes (MongoDB v3.4+ only) | ||||
|  | ||||
| .. note:: | ||||
|  | ||||
|     Additional options are forwarded as **kwargs to pymongo's create_index method. | ||||
|     Inheritance adds extra fields indices see: :ref:`document-inheritance`. | ||||
|  | ||||
| Global index default options | ||||
| @@ -551,16 +524,15 @@ There are a few top level defaults for all indexes that can be set:: | ||||
|         title = StringField() | ||||
|         rating = StringField() | ||||
|         meta = { | ||||
|             'index_opts': {}, | ||||
|             'index_options': {}, | ||||
|             'index_background': True, | ||||
|             'index_cls': False, | ||||
|             'auto_create_index': True, | ||||
|             'index_drop_dups': True, | ||||
|             'index_cls': False | ||||
|         } | ||||
|  | ||||
|  | ||||
| :attr:`index_opts` (Optional) | ||||
|     Set any default index options - see the `full options list <https://docs.mongodb.com/manual/reference/method/db.collection.createIndex/#db.collection.createIndex>`_ | ||||
| :attr:`index_options` (Optional) | ||||
|     Set any default index options - see the `full options list <http://docs.mongodb.org/manual/reference/method/db.collection.ensureIndex/#db.collection.ensureIndex>`_ | ||||
|  | ||||
| :attr:`index_background` (Optional) | ||||
|     Set the default value for if an index should be indexed in the background | ||||
| @@ -568,15 +540,10 @@ There are a few top level defaults for all indexes that can be set:: | ||||
| :attr:`index_cls` (Optional) | ||||
|     A way to turn off a specific index for _cls. | ||||
|  | ||||
| :attr:`auto_create_index` (Optional) | ||||
|     When this is True (default), MongoEngine will ensure that the correct | ||||
|     indexes exist in MongoDB each time a command is run. This can be disabled | ||||
|     in systems where indexes are managed separately. Disabling this will improve | ||||
|     performance. | ||||
|  | ||||
| :attr:`index_drop_dups` (Optional) | ||||
|     Set the default value for if an index should drop duplicates | ||||
|     Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning | ||||
|  | ||||
| .. note:: Since MongoDB 3.0 drop_dups is not supported anymore. Raises a Warning | ||||
|     and has no effect | ||||
|  | ||||
|  | ||||
| @@ -651,7 +618,7 @@ collection after a given period. See the official | ||||
| documentation for more information.  A common usecase might be session data:: | ||||
|  | ||||
|     class Session(Document): | ||||
|         created = DateTimeField(default=datetime.utcnow) | ||||
|         created = DateTimeField(default=datetime.now) | ||||
|         meta = { | ||||
|             'indexes': [ | ||||
|                 {'fields': ['created'], 'expireAfterSeconds': 3600} | ||||
| @@ -714,16 +681,11 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. :: | ||||
| Shard keys | ||||
| ========== | ||||
|  | ||||
| If your collection is sharded by multiple keys, then you can improve shard | ||||
| routing (and thus the performance of your application) by specifying the shard | ||||
| key, using the :attr:`shard_key` attribute of | ||||
| :attr:`~mongoengine.Document.meta`. The shard key should be defined as a tuple. | ||||
|  | ||||
| This ensures that the full shard key is sent with the query when calling | ||||
| methods such as :meth:`~mongoengine.document.Document.save`, | ||||
| :meth:`~mongoengine.document.Document.update`, | ||||
| :meth:`~mongoengine.document.Document.modify`, or | ||||
| :meth:`~mongoengine.document.Document.delete` on an existing | ||||
| If your collection is sharded, then you need to specify the shard key as a tuple, | ||||
| using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`. | ||||
| This ensures that the shard key is sent with the query when calling the | ||||
| :meth:`~mongoengine.document.Document.save` or | ||||
| :meth:`~mongoengine.document.Document.update` method on an existing | ||||
| :class:`~mongoengine.Document` instance:: | ||||
|  | ||||
|     class LogEntry(Document): | ||||
| @@ -733,8 +695,7 @@ methods such as :meth:`~mongoengine.document.Document.save`, | ||||
|         data = StringField() | ||||
|  | ||||
|         meta = { | ||||
|             'shard_key': ('machine', 'timestamp'), | ||||
|             'indexes': ('machine', 'timestamp'), | ||||
|             'shard_key': ('machine', 'timestamp',) | ||||
|         } | ||||
|  | ||||
| .. _document-inheritance: | ||||
| @@ -744,7 +705,7 @@ Document inheritance | ||||
|  | ||||
| To create a specialised type of a :class:`~mongoengine.Document` you have | ||||
| defined, you may subclass it and add any extra fields or methods you may need. | ||||
| As this new class is not a direct subclass of | ||||
| As this is new class is not a direct subclass of | ||||
| :class:`~mongoengine.Document`, it will not be stored in its own collection; it | ||||
| will use the same collection as its superclass uses. This allows for more | ||||
| convenient and efficient retrieval of related documents -- all you need do is | ||||
| @@ -764,30 +725,6 @@ document.:: | ||||
| .. note:: From 0.8 onwards :attr:`allow_inheritance` defaults | ||||
|           to False, meaning you must set it to True to use inheritance. | ||||
|  | ||||
|           Setting :attr:`allow_inheritance` to True should also be used in | ||||
|           :class:`~mongoengine.EmbeddedDocument` class in case you need to subclass it | ||||
|  | ||||
| When it comes to querying using :attr:`.objects()`, querying `Page.objects()` will query | ||||
| both `Page` and `DatedPage` whereas querying `DatedPage` will only query the `DatedPage` documents. | ||||
| Behind the scenes, MongoEngine deals with inheritance by adding a :attr:`_cls` attribute that contains | ||||
| the class name in every documents. When a document is loaded, MongoEngine checks | ||||
| it's :attr:`_cls` attribute and use that class to construct the instance.:: | ||||
|  | ||||
|     Page(title='a funky title').save() | ||||
|     DatedPage(title='another title', date=datetime.utcnow()).save() | ||||
|  | ||||
|     print(Page.objects().count())         # 2 | ||||
|     print(DatedPage.objects().count())    # 1 | ||||
|  | ||||
|     # print documents in their native form | ||||
|     # we remove 'id' to avoid polluting the output with unnecessary detail | ||||
|     qs = Page.objects.exclude('id').as_pymongo() | ||||
|     print(list(qs)) | ||||
|     # [ | ||||
|     #   {'_cls': u 'Page', 'title': 'a funky title'}, | ||||
|     #   {'_cls': u 'Page.DatedPage', 'title': u 'another title', 'date': datetime.datetime(2019, 12, 13, 20, 16, 59, 993000)} | ||||
|     # ] | ||||
|  | ||||
| Working with existing data | ||||
| -------------------------- | ||||
| As MongoEngine no longer defaults to needing :attr:`_cls`, you can quickly and | ||||
|   | ||||
| @@ -57,8 +57,7 @@ document values for example:: | ||||
|  | ||||
|         def clean(self): | ||||
|             """Ensures that only published essays have a `pub_date` and | ||||
|             automatically sets `pub_date` if essay is published and `pub_date` | ||||
|             is not set""" | ||||
|             automatically sets the pub_date if published and not set""" | ||||
|             if self.status == 'Draft' and self.pub_date is not None: | ||||
|                 msg = 'Draft entries should not have a publication date.' | ||||
|                 raise ValidationError(msg) | ||||
|   | ||||
| @@ -10,9 +10,8 @@ Writing | ||||
| GridFS support comes in the form of the :class:`~mongoengine.fields.FileField` field | ||||
| object. This field acts as a file-like object and provides a couple of | ||||
| different ways of inserting and retrieving data. Arbitrary metadata such as | ||||
| content type can also be stored alongside the files. The object returned when accessing a | ||||
| FileField is a proxy to `Pymongo's GridFS <https://api.mongodb.com/python/current/examples/gridfs.html#gridfs-example>`_ | ||||
| In the following example, a document is created to store details about animals, including a photo:: | ||||
| content type can also be stored alongside the files. In the following example, | ||||
| a document is created to store details about animals, including a photo:: | ||||
|  | ||||
|     class Animal(Document): | ||||
|         genus = StringField() | ||||
| @@ -21,8 +20,8 @@ In the following example, a document is created to store details about animals, | ||||
|  | ||||
|     marmot = Animal(genus='Marmota', family='Sciuridae') | ||||
|  | ||||
|     with open('marmot.jpg', 'rb') as fd: | ||||
|         marmot.photo.put(fd, content_type = 'image/jpeg') | ||||
|     marmot_photo = open('marmot.jpg', 'rb') | ||||
|     marmot.photo.put(marmot_photo, content_type = 'image/jpeg') | ||||
|     marmot.save() | ||||
|  | ||||
| Retrieval | ||||
| @@ -35,20 +34,6 @@ field. The file can also be retrieved just as easily:: | ||||
|     photo = marmot.photo.read() | ||||
|     content_type = marmot.photo.content_type | ||||
|  | ||||
| .. note:: If you need to read() the content of a file multiple times, you'll need to "rewind" | ||||
|     the file-like object using `seek`:: | ||||
|  | ||||
|         marmot = Animal.objects(genus='Marmota').first() | ||||
|         content1 = marmot.photo.read() | ||||
|         assert content1 != "" | ||||
|  | ||||
|         content2 = marmot.photo.read()    # will be empty | ||||
|         assert content2 == "" | ||||
|  | ||||
|         marmot.photo.seek(0)              # rewind the file by setting the current position of the cursor in the file to 0 | ||||
|         content3 = marmot.photo.read() | ||||
|         assert content3 == content1 | ||||
|  | ||||
| Streaming | ||||
| --------- | ||||
|  | ||||
| @@ -68,8 +53,7 @@ Deletion | ||||
|  | ||||
| Deleting stored files is achieved with the :func:`delete` method:: | ||||
|  | ||||
|     marmot.photo.delete()    # Deletes the GridFS document | ||||
|     marmot.save()            # Saves the GridFS reference (being None) contained in the marmot instance | ||||
|     marmot.photo.delete() | ||||
|  | ||||
| .. warning:: | ||||
|  | ||||
| @@ -87,5 +71,4 @@ Files can be replaced with the :func:`replace` method. This works just like | ||||
| the :func:`put` method so even metadata can (and should) be replaced:: | ||||
|  | ||||
|     another_marmot = open('another_marmot.png', 'rb') | ||||
|     marmot.photo.replace(another_marmot, content_type='image/png')  # Replaces the GridFS document | ||||
|     marmot.save()                                                   # Replaces the GridFS reference contained in marmot instance | ||||
|     marmot.photo.replace(another_marmot, content_type='image/png') | ||||
|   | ||||
| @@ -13,5 +13,4 @@ User Guide | ||||
|    gridfs | ||||
|    signals | ||||
|    text-indexes | ||||
|    logging-monitoring | ||||
|    mongomock | ||||
|   | ||||
| @@ -1,80 +0,0 @@ | ||||
| ================== | ||||
| Logging/Monitoring | ||||
| ================== | ||||
|  | ||||
| It is possible to use `pymongo.monitoring <https://api.mongodb.com/python/current/api/pymongo/monitoring.html>`_ to monitor | ||||
| the driver events (e.g: queries, connections, etc). This can be handy if you want to monitor the queries issued by | ||||
| MongoEngine to the driver. | ||||
|  | ||||
| To use `pymongo.monitoring` with MongoEngine, you need to make sure that you are registering the listeners | ||||
| **before** establishing the database connection (i.e calling `connect`): | ||||
|  | ||||
| The following snippet provides a basic logging of all command events: | ||||
|  | ||||
| .. code-block:: python | ||||
|  | ||||
|     import logging | ||||
|     from pymongo import monitoring | ||||
|     from mongoengine import * | ||||
|  | ||||
|     log = logging.getLogger() | ||||
|     log.setLevel(logging.DEBUG) | ||||
|     logging.basicConfig(level=logging.DEBUG) | ||||
|  | ||||
|  | ||||
|     class CommandLogger(monitoring.CommandListener): | ||||
|  | ||||
|         def started(self, event): | ||||
|             log.debug("Command {0.command_name} with request id " | ||||
|                      "{0.request_id} started on server " | ||||
|                      "{0.connection_id}".format(event)) | ||||
|  | ||||
|         def succeeded(self, event): | ||||
|             log.debug("Command {0.command_name} with request id " | ||||
|                      "{0.request_id} on server {0.connection_id} " | ||||
|                      "succeeded in {0.duration_micros} " | ||||
|                      "microseconds".format(event)) | ||||
|  | ||||
|         def failed(self, event): | ||||
|             log.debug("Command {0.command_name} with request id " | ||||
|                      "{0.request_id} on server {0.connection_id} " | ||||
|                      "failed in {0.duration_micros} " | ||||
|                      "microseconds".format(event)) | ||||
|  | ||||
|     monitoring.register(CommandLogger()) | ||||
|  | ||||
|  | ||||
|     class Jedi(Document): | ||||
|         name = StringField() | ||||
|  | ||||
|  | ||||
|     connect() | ||||
|  | ||||
|  | ||||
|     log.info('GO!') | ||||
|  | ||||
|     log.info('Saving an item through MongoEngine...') | ||||
|     Jedi(name='Obi-Wan Kenobii').save() | ||||
|  | ||||
|     log.info('Querying through MongoEngine...') | ||||
|     obiwan = Jedi.objects.first() | ||||
|  | ||||
|     log.info('Updating through MongoEngine...') | ||||
|     obiwan.name = 'Obi-Wan Kenobi' | ||||
|     obiwan.save() | ||||
|  | ||||
|  | ||||
| Executing this prints the following output:: | ||||
|  | ||||
|     INFO:root:GO! | ||||
|     INFO:root:Saving an item through MongoEngine... | ||||
|     DEBUG:root:Command insert with request id 1681692777 started on server ('localhost', 27017) | ||||
|     DEBUG:root:Command insert with request id 1681692777 on server ('localhost', 27017) succeeded in 562 microseconds | ||||
|     INFO:root:Querying through MongoEngine... | ||||
|     DEBUG:root:Command find with request id 1714636915 started on server ('localhost', 27017) | ||||
|     DEBUG:root:Command find with request id 1714636915 on server ('localhost', 27017) succeeded in 341 microseconds | ||||
|     INFO:root:Updating through MongoEngine... | ||||
|     DEBUG:root:Command update with request id 1957747793 started on server ('localhost', 27017) | ||||
|     DEBUG:root:Command update with request id 1957747793 on server ('localhost', 27017) succeeded in 455 microseconds | ||||
|  | ||||
| More details can of course be obtained by checking the `event` argument from the `CommandListener`. | ||||
| @@ -19,30 +19,3 @@ or with an alias: | ||||
|  | ||||
|     connect('mongoenginetest', host='mongomock://localhost', alias='testdb') | ||||
|     conn = get_connection('testdb') | ||||
|  | ||||
| Example of test file: | ||||
| --------------------- | ||||
| .. code-block:: python | ||||
|  | ||||
|     import unittest | ||||
|     from mongoengine import connect, disconnect | ||||
|  | ||||
|     class Person(Document): | ||||
|         name = StringField() | ||||
|  | ||||
|     class TestPerson(unittest.TestCase): | ||||
|  | ||||
|         @classmethod | ||||
|         def setUpClass(cls): | ||||
|             connect('mongoenginetest', host='mongomock://localhost') | ||||
|  | ||||
|         @classmethod | ||||
|         def tearDownClass(cls): | ||||
|            disconnect() | ||||
|  | ||||
|         def test_thing(self): | ||||
|             pers = Person(name='John') | ||||
|             pers.save() | ||||
|  | ||||
|             fresh_pers = Person.objects().first() | ||||
|             assert fresh_pers.name ==  'John' | ||||
|   | ||||
| @@ -64,7 +64,7 @@ Available operators are as follows: | ||||
| * ``gt`` -- greater than | ||||
| * ``gte`` -- greater than or equal to | ||||
| * ``not`` -- negate a standard check, may be used before other operators (e.g. | ||||
|   ``Q(age__not__mod=(5, 0))``) | ||||
|   ``Q(age__not__mod=5)``) | ||||
| * ``in`` -- value is in list (a list of values should be provided) | ||||
| * ``nin`` -- value is not in list (a list of values should be provided) | ||||
| * ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values | ||||
| @@ -222,18 +222,6 @@ keyword argument:: | ||||
|  | ||||
| .. versionadded:: 0.4 | ||||
|  | ||||
| Sorting/Ordering results | ||||
| ======================== | ||||
| It is possible to order the results by 1 or more keys using :meth:`~mongoengine.queryset.QuerySet.order_by`. | ||||
| The order may be specified by prepending each of the keys by "+" or "-". Ascending order is assumed if there's no prefix.:: | ||||
|  | ||||
|     # Order by ascending date | ||||
|     blogs = BlogPost.objects().order_by('date')    # equivalent to .order_by('+date') | ||||
|  | ||||
|     # Order by ascending date first, then descending title | ||||
|     blogs = BlogPost.objects().order_by('+date', '-title') | ||||
|  | ||||
|  | ||||
| Limiting and skipping results | ||||
| ============================= | ||||
| Just as with traditional ORMs, you may limit the number of results returned or | ||||
| @@ -361,9 +349,9 @@ Just as with limiting and skipping results, there is a method on a | ||||
| You could technically use ``len(User.objects)`` to get the same result, but it | ||||
| would be significantly slower than :meth:`~mongoengine.queryset.QuerySet.count`. | ||||
| When you execute a server-side count query, you let MongoDB do the heavy | ||||
| lifting and you receive a single integer over the wire. Meanwhile, ``len()`` | ||||
| lifting and you receive a single integer over the wire. Meanwhile, len() | ||||
| retrieves all the results, places them in a local cache, and finally counts | ||||
| them. If we compare the performance of the two operations, ``len()`` is much slower | ||||
| them. If we compare the performance of the two operations, len() is much slower | ||||
| than :meth:`~mongoengine.queryset.QuerySet.count`. | ||||
|  | ||||
| Further aggregation | ||||
| @@ -398,25 +386,6 @@ would be generating "tag-clouds":: | ||||
|     top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] | ||||
|  | ||||
|  | ||||
| MongoDB aggregation API | ||||
| ----------------------- | ||||
| If you need to run aggregation pipelines, MongoEngine provides an entry point to `Pymongo's aggregation framework <https://api.mongodb.com/python/current/examples/aggregation.html#aggregation-framework>`_ | ||||
| through :meth:`~mongoengine.queryset.QuerySet.aggregate`. Check out Pymongo's documentation for the syntax and pipeline. | ||||
| An example of its use would be:: | ||||
|  | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         Person(name='John').save() | ||||
|         Person(name='Bob').save() | ||||
|  | ||||
|         pipeline = [ | ||||
|             {"$sort" : {"name" : -1}}, | ||||
|             {"$project": {"_id": 0, "name": {"$toUpper": "$name"}}} | ||||
|             ] | ||||
|         data = Person.objects().aggregate(pipeline) | ||||
|         assert data == [{'name': 'BOB'}, {'name': 'JOHN'}] | ||||
|  | ||||
| Query efficiency and performance | ||||
| ================================ | ||||
|  | ||||
| @@ -487,14 +456,14 @@ data. To turn off dereferencing of the results of a query use | ||||
| :func:`~mongoengine.queryset.QuerySet.no_dereference` on the queryset like so:: | ||||
|  | ||||
|     post = Post.objects.no_dereference().first() | ||||
|     assert(isinstance(post.author, DBRef)) | ||||
|     assert(isinstance(post.author, ObjectId)) | ||||
|  | ||||
| You can also turn off all dereferencing for a fixed period by using the | ||||
| :class:`~mongoengine.context_managers.no_dereference` context manager:: | ||||
|  | ||||
|     with no_dereference(Post) as Post: | ||||
|         post = Post.objects.first() | ||||
|         assert(isinstance(post.author, DBRef)) | ||||
|         assert(isinstance(post.author, ObjectId)) | ||||
|  | ||||
|     # Outside the context manager dereferencing occurs. | ||||
|     assert(isinstance(post.author, User)) | ||||
| @@ -596,16 +565,6 @@ cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: | ||||
|     >>> post.tags | ||||
|     ['database', 'mongodb'] | ||||
|  | ||||
| From MongoDB version 2.6, push operator supports $position value which allows | ||||
| to push values with index:: | ||||
|  | ||||
|     >>> post = BlogPost(title="Test", tags=["mongo"]) | ||||
|     >>> post.save() | ||||
|     >>> post.update(push__tags__0=["database", "code"]) | ||||
|     >>> post.reload() | ||||
|     >>> post.tags | ||||
|     ['database', 'code', 'mongo'] | ||||
|  | ||||
| .. note:: | ||||
|     Currently only top level lists are handled, future versions of mongodb / | ||||
|     pymongo plan to support nested positional operators.  See `The $ positional | ||||
|   | ||||
| @@ -43,10 +43,10 @@ Available signals include: | ||||
|   has taken place but before saving. | ||||
|  | ||||
| `post_save` | ||||
|   Called within :meth:`~mongoengine.Document.save` after most actions | ||||
|   (validation, insert/update, and cascades, but not clearing dirty flags) have  | ||||
|   completed successfully.  Passed the additional boolean keyword argument  | ||||
|   `created` to indicate if the save was an insert or an update. | ||||
|   Called within :meth:`~mongoengine.Document.save` after all actions | ||||
|   (validation, insert/update, cascades, clearing dirty flags) have completed | ||||
|   successfully.  Passed the additional boolean keyword argument `created` to | ||||
|   indicate if the save was an insert or an update. | ||||
|  | ||||
| `pre_delete` | ||||
|   Called within :meth:`~mongoengine.Document.delete` prior to | ||||
| @@ -113,10 +113,6 @@ handlers within your subclass:: | ||||
|     signals.pre_save.connect(Author.pre_save, sender=Author) | ||||
|     signals.post_save.connect(Author.post_save, sender=Author) | ||||
|  | ||||
| .. warning:: | ||||
|  | ||||
|     Note that EmbeddedDocument only supports pre/post_init signals. pre/post_save, etc should be attached to Document's class only. Attaching pre_save to an EmbeddedDocument is ignored silently. | ||||
|  | ||||
| Finally, you can also use this small decorator to quickly create a number of | ||||
| signals and attach them to your :class:`~mongoengine.Document` or | ||||
| :class:`~mongoengine.EmbeddedDocument` subclasses as class decorators:: | ||||
|   | ||||
| @@ -48,4 +48,4 @@ Ordering by text score | ||||
|  | ||||
| :: | ||||
|  | ||||
|   objects = News.objects.search_text('mongo').order_by('$text_score') | ||||
|   objects = News.objects.search('mongo').order_by('$text_score') | ||||
|   | ||||
| @@ -23,18 +23,9 @@ MongoDB. To install it, simply run | ||||
| :doc:`upgrade` | ||||
|   How to upgrade MongoEngine. | ||||
|  | ||||
| :doc:`faq` | ||||
|   Frequently Asked Questions | ||||
|  | ||||
| :doc:`django` | ||||
|   Using MongoEngine and Django | ||||
|  | ||||
| MongoDB and driver support | ||||
| -------------------------- | ||||
|  | ||||
| MongoEngine is based on the PyMongo driver and tested against multiple versions of MongoDB. | ||||
| For further details, please refer to the `readme <https://github.com/MongoEngine/mongoengine#mongoengine>`_. | ||||
|  | ||||
| Community | ||||
| --------- | ||||
|  | ||||
| @@ -82,7 +73,6 @@ formats for offline reading. | ||||
|     apireference | ||||
|     changelog | ||||
|     upgrade | ||||
|     faq | ||||
|     django | ||||
|  | ||||
| Indices and tables | ||||
|   | ||||
| @@ -86,7 +86,7 @@ of them stand out as particularly intuitive solutions. | ||||
| Posts | ||||
| ^^^^^ | ||||
|  | ||||
| Happily MongoDB *isn't* a relational database, so we're not going to do it that | ||||
| Happily mongoDB *isn't* a relational database, so we're not going to do it that | ||||
| way. As it turns out, we can use MongoDB's schemaless nature to provide us with | ||||
| a much nicer solution. We will store all of the posts in *one collection* and | ||||
| each post type will only store the fields it needs. If we later want to add | ||||
| @@ -153,7 +153,7 @@ post. This works, but there is no real reason to be storing the comments | ||||
| separately from their associated posts, other than to work around the | ||||
| relational model. Using MongoDB we can store the comments as a list of | ||||
| *embedded documents* directly on a post document. An embedded document should | ||||
| be treated no differently than a regular document; it just doesn't have its own | ||||
| be treated no differently that a regular document; it just doesn't have its own | ||||
| collection in the database. Using MongoEngine, we can define the structure of | ||||
| embedded documents, along with utility methods, in exactly the same way we do | ||||
| with regular documents:: | ||||
|   | ||||
| @@ -6,23 +6,6 @@ Development | ||||
| *********** | ||||
| (Fill this out whenever you introduce breaking changes to MongoEngine) | ||||
|  | ||||
| URLField's constructor no longer takes `verify_exists` | ||||
|  | ||||
| 0.15.0 | ||||
| ****** | ||||
|  | ||||
| 0.14.0 | ||||
| ****** | ||||
| This release includes a few bug fixes and a significant code cleanup. The most | ||||
| important change is that `QuerySet.as_pymongo` no longer supports a | ||||
| `coerce_types` mode. If you used it in the past, a) please let us know of your | ||||
| use case, b) you'll need to override `as_pymongo` to get the desired outcome. | ||||
|  | ||||
| This release also makes the EmbeddedDocument not hashable by default. If you | ||||
| use embedded documents in sets or dictionaries, you might have to override | ||||
| `__hash__` and implement a hashing logic specific to your use case. See #1528 | ||||
| for the reason behind this change. | ||||
|  | ||||
| 0.13.0 | ||||
| ****** | ||||
| This release adds Unicode support to the `EmailField` and changes its | ||||
|   | ||||
| @@ -18,25 +18,19 @@ from mongoengine.queryset import * | ||||
| from mongoengine.signals import * | ||||
|  | ||||
|  | ||||
| __all__ = ( | ||||
|     list(document.__all__) | ||||
|     + list(fields.__all__) | ||||
|     + list(connection.__all__) | ||||
|     + list(queryset.__all__) | ||||
|     + list(signals.__all__) | ||||
|     + list(errors.__all__) | ||||
| ) | ||||
| __all__ = (list(document.__all__) + list(fields.__all__) + | ||||
|            list(connection.__all__) + list(queryset.__all__) + | ||||
|            list(signals.__all__) + list(errors.__all__)) | ||||
|  | ||||
|  | ||||
| VERSION = (0, 19, 1) | ||||
| VERSION = (0, 13, 0) | ||||
|  | ||||
|  | ||||
| def get_version(): | ||||
|     """Return the VERSION as a string. | ||||
|  | ||||
|     For example, if `VERSION == (0, 10, 7)`, return '0.10.7'. | ||||
|     """Return the VERSION as a string, e.g. for VERSION == (0, 10, 7), | ||||
|     return '0.10.7'. | ||||
|     """ | ||||
|     return ".".join(map(str, VERSION)) | ||||
|     return '.'.join(map(str, VERSION)) | ||||
|  | ||||
|  | ||||
| __version__ = get_version() | ||||
|   | ||||
| @@ -12,22 +12,17 @@ from mongoengine.base.metaclasses import * | ||||
|  | ||||
| __all__ = ( | ||||
|     # common | ||||
|     "UPDATE_OPERATORS", | ||||
|     "_document_registry", | ||||
|     "get_document", | ||||
|     'UPDATE_OPERATORS', '_document_registry', 'get_document', | ||||
|  | ||||
|     # datastructures | ||||
|     "BaseDict", | ||||
|     "BaseList", | ||||
|     "EmbeddedDocumentList", | ||||
|     "LazyReference", | ||||
|     'BaseDict', 'BaseList', 'EmbeddedDocumentList', | ||||
|  | ||||
|     # document | ||||
|     "BaseDocument", | ||||
|     'BaseDocument', | ||||
|  | ||||
|     # fields | ||||
|     "BaseField", | ||||
|     "ComplexBaseField", | ||||
|     "ObjectIdField", | ||||
|     "GeoJsonBaseField", | ||||
|     'BaseField', 'ComplexBaseField', 'ObjectIdField', 'GeoJsonBaseField', | ||||
|  | ||||
|     # metaclasses | ||||
|     "DocumentMetaclass", | ||||
|     "TopLevelDocumentMetaclass", | ||||
|     'DocumentMetaclass', 'TopLevelDocumentMetaclass' | ||||
| ) | ||||
|   | ||||
| @@ -1,62 +1,31 @@ | ||||
| from mongoengine.errors import NotRegistered | ||||
|  | ||||
| __all__ = ("UPDATE_OPERATORS", "get_document", "_document_registry") | ||||
| __all__ = ('UPDATE_OPERATORS', 'get_document', '_document_registry') | ||||
|  | ||||
|  | ||||
| UPDATE_OPERATORS = { | ||||
|     "set", | ||||
|     "unset", | ||||
|     "inc", | ||||
|     "dec", | ||||
|     "mul", | ||||
|     "pop", | ||||
|     "push", | ||||
|     "push_all", | ||||
|     "pull", | ||||
|     "pull_all", | ||||
|     "add_to_set", | ||||
|     "set_on_insert", | ||||
|     "min", | ||||
|     "max", | ||||
|     "rename", | ||||
| } | ||||
| UPDATE_OPERATORS = set(['set', 'unset', 'inc', 'dec', 'pop', 'push', | ||||
|                         'push_all', 'pull', 'pull_all', 'add_to_set', | ||||
|                         'set_on_insert', 'min', 'max', 'rename']) | ||||
|  | ||||
|  | ||||
| _document_registry = {} | ||||
|  | ||||
|  | ||||
| def get_document(name): | ||||
|     """Get a registered Document class by name.""" | ||||
|     """Get a document class by name.""" | ||||
|     doc = _document_registry.get(name, None) | ||||
|     if not doc: | ||||
|         # Possible old style name | ||||
|         single_end = name.split(".")[-1] | ||||
|         compound_end = ".%s" % single_end | ||||
|         possible_match = [ | ||||
|             k for k in _document_registry if k.endswith(compound_end) or k == single_end | ||||
|         ] | ||||
|         single_end = name.split('.')[-1] | ||||
|         compound_end = '.%s' % single_end | ||||
|         possible_match = [k for k in _document_registry.keys() | ||||
|                           if k.endswith(compound_end) or k == single_end] | ||||
|         if len(possible_match) == 1: | ||||
|             doc = _document_registry.get(possible_match.pop(), None) | ||||
|     if not doc: | ||||
|         raise NotRegistered( | ||||
|             """ | ||||
|         raise NotRegistered(""" | ||||
|             `%s` has not been registered in the document registry. | ||||
|             Importing the document class automatically registers it, has it | ||||
|             been imported? | ||||
|         """.strip() | ||||
|             % name | ||||
|         ) | ||||
|         """.strip() % name) | ||||
|     return doc | ||||
|  | ||||
|  | ||||
| def _get_documents_by_db(connection_alias, default_connection_alias): | ||||
|     """Get all registered Documents class attached to a given database""" | ||||
|  | ||||
|     def get_doc_alias(doc_cls): | ||||
|         return doc_cls._meta.get("db_alias", default_connection_alias) | ||||
|  | ||||
|     return [ | ||||
|         doc_cls | ||||
|         for doc_cls in _document_registry.values() | ||||
|         if get_doc_alias(doc_cls) == connection_alias | ||||
|     ] | ||||
|   | ||||
| @@ -1,43 +1,12 @@ | ||||
| import itertools | ||||
| import weakref | ||||
|  | ||||
| from bson import DBRef | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import DoesNotExist, MultipleObjectsReturned | ||||
|  | ||||
| __all__ = ( | ||||
|     "BaseDict", | ||||
|     "StrictDict", | ||||
|     "BaseList", | ||||
|     "EmbeddedDocumentList", | ||||
|     "LazyReference", | ||||
| ) | ||||
|  | ||||
|  | ||||
| def mark_as_changed_wrapper(parent_method): | ||||
|     """Decorator that ensures _mark_as_changed method gets called.""" | ||||
|  | ||||
|     def wrapper(self, *args, **kwargs): | ||||
|         # Can't use super() in the decorator. | ||||
|         result = parent_method(self, *args, **kwargs) | ||||
|         self._mark_as_changed() | ||||
|         return result | ||||
|  | ||||
|     return wrapper | ||||
|  | ||||
|  | ||||
| def mark_key_as_changed_wrapper(parent_method): | ||||
|     """Decorator that ensures _mark_as_changed method gets called with the key argument""" | ||||
|  | ||||
|     def wrapper(self, key, *args, **kwargs): | ||||
|         # Can't use super() in the decorator. | ||||
|         result = parent_method(self, key, *args, **kwargs) | ||||
|         self._mark_as_changed(key) | ||||
|         return result | ||||
|  | ||||
|     return wrapper | ||||
| __all__ = ('BaseDict', 'BaseList', 'EmbeddedDocumentList') | ||||
|  | ||||
|  | ||||
| class BaseDict(dict): | ||||
| @@ -48,36 +17,46 @@ class BaseDict(dict): | ||||
|     _name = None | ||||
|  | ||||
|     def __init__(self, dict_items, instance, name): | ||||
|         BaseDocument = _import_class("BaseDocument") | ||||
|         Document = _import_class('Document') | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|  | ||||
|         if isinstance(instance, BaseDocument): | ||||
|         if isinstance(instance, (Document, EmbeddedDocument)): | ||||
|             self._instance = weakref.proxy(instance) | ||||
|         self._name = name | ||||
|         super(BaseDict, self).__init__(dict_items) | ||||
|  | ||||
|     def get(self, key, default=None): | ||||
|         # get does not use __getitem__ by default so we must override it as well | ||||
|         try: | ||||
|             return self.__getitem__(key) | ||||
|         except KeyError: | ||||
|             return default | ||||
|  | ||||
|     def __getitem__(self, key): | ||||
|     def __getitem__(self, key, *args, **kwargs): | ||||
|         value = super(BaseDict, self).__getitem__(key) | ||||
|  | ||||
|         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||
|             value._instance = self._instance | ||||
|         elif isinstance(value, dict) and not isinstance(value, BaseDict): | ||||
|             value = BaseDict(value, None, "%s.%s" % (self._name, key)) | ||||
|         elif not isinstance(value, BaseDict) and isinstance(value, dict): | ||||
|             value = BaseDict(value, None, '%s.%s' % (self._name, key)) | ||||
|             super(BaseDict, self).__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         elif isinstance(value, list) and not isinstance(value, BaseList): | ||||
|             value = BaseList(value, None, "%s.%s" % (self._name, key)) | ||||
|         elif not isinstance(value, BaseList) and isinstance(value, list): | ||||
|             value = BaseList(value, None, '%s.%s' % (self._name, key)) | ||||
|             super(BaseDict, self).__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         return value | ||||
|  | ||||
|     def __setitem__(self, key, value, *args, **kwargs): | ||||
|         self._mark_as_changed(key) | ||||
|         return super(BaseDict, self).__setitem__(key, value) | ||||
|  | ||||
|     def __delete__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseDict, self).__delete__(*args, **kwargs) | ||||
|  | ||||
|     def __delitem__(self, key, *args, **kwargs): | ||||
|         self._mark_as_changed(key) | ||||
|         return super(BaseDict, self).__delitem__(key) | ||||
|  | ||||
|     def __delattr__(self, key, *args, **kwargs): | ||||
|         self._mark_as_changed(key) | ||||
|         return super(BaseDict, self).__delattr__(key) | ||||
|  | ||||
|     def __getstate__(self): | ||||
|         self.instance = None | ||||
|         self._dereferenced = False | ||||
| @@ -87,19 +66,30 @@ class BaseDict(dict): | ||||
|         self = state | ||||
|         return self | ||||
|  | ||||
|     __setitem__ = mark_key_as_changed_wrapper(dict.__setitem__) | ||||
|     __delattr__ = mark_key_as_changed_wrapper(dict.__delattr__) | ||||
|     __delitem__ = mark_key_as_changed_wrapper(dict.__delitem__) | ||||
|     pop = mark_as_changed_wrapper(dict.pop) | ||||
|     clear = mark_as_changed_wrapper(dict.clear) | ||||
|     update = mark_as_changed_wrapper(dict.update) | ||||
|     popitem = mark_as_changed_wrapper(dict.popitem) | ||||
|     setdefault = mark_as_changed_wrapper(dict.setdefault) | ||||
|     def clear(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseDict, self).clear() | ||||
|  | ||||
|     def pop(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseDict, self).pop(*args, **kwargs) | ||||
|  | ||||
|     def popitem(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseDict, self).popitem() | ||||
|  | ||||
|     def setdefault(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseDict, self).setdefault(*args, **kwargs) | ||||
|  | ||||
|     def update(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseDict, self).update(*args, **kwargs) | ||||
|  | ||||
|     def _mark_as_changed(self, key=None): | ||||
|         if hasattr(self._instance, "_mark_as_changed"): | ||||
|         if hasattr(self._instance, '_mark_as_changed'): | ||||
|             if key: | ||||
|                 self._instance._mark_as_changed("%s.%s" % (self._name, key)) | ||||
|                 self._instance._mark_as_changed('%s.%s' % (self._name, key)) | ||||
|             else: | ||||
|                 self._instance._mark_as_changed(self._name) | ||||
|  | ||||
| @@ -112,42 +102,52 @@ class BaseList(list): | ||||
|     _name = None | ||||
|  | ||||
|     def __init__(self, list_items, instance, name): | ||||
|         BaseDocument = _import_class("BaseDocument") | ||||
|         Document = _import_class('Document') | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|  | ||||
|         if isinstance(instance, BaseDocument): | ||||
|         if isinstance(instance, (Document, EmbeddedDocument)): | ||||
|             self._instance = weakref.proxy(instance) | ||||
|         self._name = name | ||||
|         super(BaseList, self).__init__(list_items) | ||||
|  | ||||
|     def __getitem__(self, key): | ||||
|         # change index to positive value because MongoDB does not support negative one | ||||
|         if isinstance(key, int) and key < 0: | ||||
|             key = len(self) + key | ||||
|     def __getitem__(self, key, *args, **kwargs): | ||||
|         value = super(BaseList, self).__getitem__(key) | ||||
|  | ||||
|         if isinstance(key, slice): | ||||
|             # When receiving a slice operator, we don't convert the structure and bind | ||||
|             # to parent's instance. This is buggy for now but would require more work to be handled properly | ||||
|             return value | ||||
|  | ||||
|         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|         if isinstance(value, EmbeddedDocument) and value._instance is None: | ||||
|             value._instance = self._instance | ||||
|         elif isinstance(value, dict) and not isinstance(value, BaseDict): | ||||
|             # Replace dict by BaseDict | ||||
|             value = BaseDict(value, None, "%s.%s" % (self._name, key)) | ||||
|         elif not isinstance(value, BaseDict) and isinstance(value, dict): | ||||
|             value = BaseDict(value, None, '%s.%s' % (self._name, key)) | ||||
|             super(BaseList, self).__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         elif isinstance(value, list) and not isinstance(value, BaseList): | ||||
|             # Replace list by BaseList | ||||
|             value = BaseList(value, None, "%s.%s" % (self._name, key)) | ||||
|         elif not isinstance(value, BaseList) and isinstance(value, list): | ||||
|             value = BaseList(value, None, '%s.%s' % (self._name, key)) | ||||
|             super(BaseList, self).__setitem__(key, value) | ||||
|             value._instance = self._instance | ||||
|         return value | ||||
|  | ||||
|     def __iter__(self): | ||||
|         for v in super(BaseList, self).__iter__(): | ||||
|             yield v | ||||
|         for i in xrange(self.__len__()): | ||||
|             yield self[i] | ||||
|  | ||||
|     def __setitem__(self, key, value, *args, **kwargs): | ||||
|         if isinstance(key, slice): | ||||
|             self._mark_as_changed() | ||||
|         else: | ||||
|             self._mark_as_changed(key) | ||||
|         return super(BaseList, self).__setitem__(key, value) | ||||
|  | ||||
|     def __delitem__(self, key, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).__delitem__(key) | ||||
|  | ||||
|     def __setslice__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).__setslice__(*args, **kwargs) | ||||
|  | ||||
|     def __delslice__(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).__delslice__(*args, **kwargs) | ||||
|  | ||||
|     def __getstate__(self): | ||||
|         self.instance = None | ||||
| @@ -158,53 +158,53 @@ class BaseList(list): | ||||
|         self = state | ||||
|         return self | ||||
|  | ||||
|     def __setitem__(self, key, value): | ||||
|         changed_key = key | ||||
|         if isinstance(key, slice): | ||||
|             # In case of slice, we don't bother to identify the exact elements being updated | ||||
|             # instead, we simply marks the whole list as changed | ||||
|             changed_key = None | ||||
|     def __iadd__(self, other): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).__iadd__(other) | ||||
|  | ||||
|         result = super(BaseList, self).__setitem__(key, value) | ||||
|         self._mark_as_changed(changed_key) | ||||
|         return result | ||||
|     def __imul__(self, other): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).__imul__(other) | ||||
|  | ||||
|     append = mark_as_changed_wrapper(list.append) | ||||
|     extend = mark_as_changed_wrapper(list.extend) | ||||
|     insert = mark_as_changed_wrapper(list.insert) | ||||
|     pop = mark_as_changed_wrapper(list.pop) | ||||
|     remove = mark_as_changed_wrapper(list.remove) | ||||
|     reverse = mark_as_changed_wrapper(list.reverse) | ||||
|     sort = mark_as_changed_wrapper(list.sort) | ||||
|     __delitem__ = mark_as_changed_wrapper(list.__delitem__) | ||||
|     __iadd__ = mark_as_changed_wrapper(list.__iadd__) | ||||
|     __imul__ = mark_as_changed_wrapper(list.__imul__) | ||||
|     def append(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).append(*args, **kwargs) | ||||
|  | ||||
|     if six.PY2: | ||||
|         # Under py3 __setslice__, __delslice__ and __getslice__ | ||||
|         # are replaced by __setitem__, __delitem__ and __getitem__ with a slice as parameter | ||||
|         # so we mimic this under python 2 | ||||
|         def __setslice__(self, i, j, sequence): | ||||
|             return self.__setitem__(slice(i, j), sequence) | ||||
|     def extend(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).extend(*args, **kwargs) | ||||
|  | ||||
|         def __delslice__(self, i, j): | ||||
|             return self.__delitem__(slice(i, j)) | ||||
|     def insert(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).insert(*args, **kwargs) | ||||
|  | ||||
|         def __getslice__(self, i, j): | ||||
|             return self.__getitem__(slice(i, j)) | ||||
|     def pop(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).pop(*args, **kwargs) | ||||
|  | ||||
|     def remove(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).remove(*args, **kwargs) | ||||
|  | ||||
|     def reverse(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).reverse() | ||||
|  | ||||
|     def sort(self, *args, **kwargs): | ||||
|         self._mark_as_changed() | ||||
|         return super(BaseList, self).sort(*args, **kwargs) | ||||
|  | ||||
|     def _mark_as_changed(self, key=None): | ||||
|         if hasattr(self._instance, "_mark_as_changed"): | ||||
|         if hasattr(self._instance, '_mark_as_changed'): | ||||
|             if key: | ||||
|                 self._instance._mark_as_changed("%s.%s" % (self._name, key % len(self))) | ||||
|                 self._instance._mark_as_changed( | ||||
|                     '%s.%s' % (self._name, key % len(self)) | ||||
|                 ) | ||||
|             else: | ||||
|                 self._instance._mark_as_changed(self._name) | ||||
|  | ||||
|  | ||||
| class EmbeddedDocumentList(BaseList): | ||||
|     def __init__(self, list_items, instance, name): | ||||
|         super(EmbeddedDocumentList, self).__init__(list_items, instance, name) | ||||
|         self._instance = instance | ||||
|  | ||||
|     @classmethod | ||||
|     def __match_all(cls, embedded_doc, kwargs): | ||||
| @@ -224,14 +224,15 @@ class EmbeddedDocumentList(BaseList): | ||||
|             return embedded_docs | ||||
|         return [doc for doc in embedded_docs if cls.__match_all(doc, kwargs)] | ||||
|  | ||||
|     def __init__(self, list_items, instance, name): | ||||
|         super(EmbeddedDocumentList, self).__init__(list_items, instance, name) | ||||
|         self._instance = instance | ||||
|  | ||||
|     def filter(self, **kwargs): | ||||
|         """ | ||||
|         Filters the list by only including embedded documents with the | ||||
|         given keyword arguments. | ||||
|  | ||||
|         This method only supports simple comparison (e.g: .filter(name='John Doe')) | ||||
|         and does not support operators like __gte, __lte, __icontains like queryset.filter does | ||||
|  | ||||
|         :param kwargs: The keyword arguments corresponding to the fields to | ||||
|          filter on. *Multiple arguments are treated as if they are ANDed | ||||
|          together.* | ||||
| @@ -286,10 +287,12 @@ class EmbeddedDocumentList(BaseList): | ||||
|         """ | ||||
|         values = self.__only_matches(self, kwargs) | ||||
|         if len(values) == 0: | ||||
|             raise DoesNotExist("%s matching query does not exist." % self._name) | ||||
|             raise DoesNotExist( | ||||
|                 '%s matching query does not exist.' % self._name | ||||
|             ) | ||||
|         elif len(values) > 1: | ||||
|             raise MultipleObjectsReturned( | ||||
|                 "%d items returned, instead of 1" % len(values) | ||||
|                 '%d items returned, instead of 1' % len(values) | ||||
|             ) | ||||
|  | ||||
|         return values[0] | ||||
| @@ -347,8 +350,7 @@ class EmbeddedDocumentList(BaseList): | ||||
|  | ||||
|     def update(self, **update): | ||||
|         """ | ||||
|         Updates the embedded documents with the given replacement values. This | ||||
|         function does not support mongoDB update operators such as ``inc__``. | ||||
|         Updates the embedded documents with the given update values. | ||||
|  | ||||
|         .. note:: | ||||
|             The embedded document changes are not automatically saved | ||||
| @@ -370,22 +372,22 @@ class EmbeddedDocumentList(BaseList): | ||||
|  | ||||
| class StrictDict(object): | ||||
|     __slots__ = () | ||||
|     _special_fields = {"get", "pop", "iteritems", "items", "keys", "create"} | ||||
|     _special_fields = set(['get', 'pop', 'iteritems', 'items', 'keys', 'create']) | ||||
|     _classes = {} | ||||
|  | ||||
|     def __init__(self, **kwargs): | ||||
|         for k, v in iteritems(kwargs): | ||||
|         for k, v in kwargs.iteritems(): | ||||
|             setattr(self, k, v) | ||||
|  | ||||
|     def __getitem__(self, key): | ||||
|         key = "_reserved_" + key if key in self._special_fields else key | ||||
|         key = '_reserved_' + key if key in self._special_fields else key | ||||
|         try: | ||||
|             return getattr(self, key) | ||||
|         except AttributeError: | ||||
|             raise KeyError(key) | ||||
|  | ||||
|     def __setitem__(self, key, value): | ||||
|         key = "_reserved_" + key if key in self._special_fields else key | ||||
|         key = '_reserved_' + key if key in self._special_fields else key | ||||
|         return setattr(self, key, value) | ||||
|  | ||||
|     def __contains__(self, key): | ||||
| @@ -422,7 +424,7 @@ class StrictDict(object): | ||||
|         return (key for key in self.__slots__ if hasattr(self, key)) | ||||
|  | ||||
|     def __len__(self): | ||||
|         return len(list(iteritems(self))) | ||||
|         return len(list(self.iteritems())) | ||||
|  | ||||
|     def __eq__(self, other): | ||||
|         return self.items() == other.items() | ||||
| @@ -432,60 +434,53 @@ class StrictDict(object): | ||||
|  | ||||
|     @classmethod | ||||
|     def create(cls, allowed_keys): | ||||
|         allowed_keys_tuple = tuple( | ||||
|             ("_reserved_" + k if k in cls._special_fields else k) for k in allowed_keys | ||||
|         ) | ||||
|         allowed_keys_tuple = tuple(('_reserved_' + k if k in cls._special_fields else k) for k in allowed_keys) | ||||
|         allowed_keys = frozenset(allowed_keys_tuple) | ||||
|         if allowed_keys not in cls._classes: | ||||
|  | ||||
|             class SpecificStrictDict(cls): | ||||
|                 __slots__ = allowed_keys_tuple | ||||
|  | ||||
|                 def __repr__(self): | ||||
|                     return "{%s}" % ", ".join( | ||||
|                         '"{0!s}": {1!r}'.format(k, v) for k, v in self.items() | ||||
|                     ) | ||||
|                     return '{%s}' % ', '.join('"{0!s}": {1!r}'.format(k, v) for k, v in self.items()) | ||||
|  | ||||
|             cls._classes[allowed_keys] = SpecificStrictDict | ||||
|         return cls._classes[allowed_keys] | ||||
|  | ||||
|  | ||||
| class LazyReference(DBRef): | ||||
|     __slots__ = ("_cached_doc", "passthrough", "document_type") | ||||
| class SemiStrictDict(StrictDict): | ||||
|     __slots__ = ('_extras', ) | ||||
|     _classes = {} | ||||
|  | ||||
|     def fetch(self, force=False): | ||||
|         if not self._cached_doc or force: | ||||
|             self._cached_doc = self.document_type.objects.get(pk=self.pk) | ||||
|             if not self._cached_doc: | ||||
|                 raise DoesNotExist("Trying to dereference unknown document %s" % (self)) | ||||
|         return self._cached_doc | ||||
|  | ||||
|     @property | ||||
|     def pk(self): | ||||
|         return self.id | ||||
|  | ||||
|     def __init__(self, document_type, pk, cached_doc=None, passthrough=False): | ||||
|         self.document_type = document_type | ||||
|         self._cached_doc = cached_doc | ||||
|         self.passthrough = passthrough | ||||
|         super(LazyReference, self).__init__( | ||||
|             self.document_type._get_collection_name(), pk | ||||
|         ) | ||||
|  | ||||
|     def __getitem__(self, name): | ||||
|         if not self.passthrough: | ||||
|             raise KeyError() | ||||
|         document = self.fetch() | ||||
|         return document[name] | ||||
|  | ||||
|     def __getattr__(self, name): | ||||
|         if not object.__getattribute__(self, "passthrough"): | ||||
|             raise AttributeError() | ||||
|         document = self.fetch() | ||||
|     def __getattr__(self, attr): | ||||
|         try: | ||||
|             return document[name] | ||||
|         except KeyError: | ||||
|             raise AttributeError() | ||||
|             super(SemiStrictDict, self).__getattr__(attr) | ||||
|         except AttributeError: | ||||
|             try: | ||||
|                 return self.__getattribute__('_extras')[attr] | ||||
|             except KeyError as e: | ||||
|                 raise AttributeError(e) | ||||
|  | ||||
|     def __repr__(self): | ||||
|         return "<LazyReference(%s, %r)>" % (self.document_type, self.pk) | ||||
|     def __setattr__(self, attr, value): | ||||
|         try: | ||||
|             super(SemiStrictDict, self).__setattr__(attr, value) | ||||
|         except AttributeError: | ||||
|             try: | ||||
|                 self._extras[attr] = value | ||||
|             except AttributeError: | ||||
|                 self._extras = {attr: value} | ||||
|  | ||||
|     def __delattr__(self, attr): | ||||
|         try: | ||||
|             super(SemiStrictDict, self).__delattr__(attr) | ||||
|         except AttributeError: | ||||
|             try: | ||||
|                 del self._extras[attr] | ||||
|             except KeyError as e: | ||||
|                 raise AttributeError(e) | ||||
|  | ||||
|     def __iter__(self): | ||||
|         try: | ||||
|             extras_iter = iter(self.__getattribute__('_extras')) | ||||
|         except AttributeError: | ||||
|             extras_iter = () | ||||
|         return itertools.chain(super(SemiStrictDict, self).__iter__(), extras_iter) | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -5,14 +5,16 @@ import weakref | ||||
| from bson import DBRef, ObjectId, SON | ||||
| import pymongo | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.base.common import UPDATE_OPERATORS | ||||
| from mongoengine.base.datastructures import BaseDict, BaseList, EmbeddedDocumentList | ||||
| from mongoengine.base.datastructures import (BaseDict, BaseList, | ||||
|                                              EmbeddedDocumentList) | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import DeprecatedError, ValidationError | ||||
| from mongoengine.errors import ValidationError | ||||
|  | ||||
| __all__ = ("BaseField", "ComplexBaseField", "ObjectIdField", "GeoJsonBaseField") | ||||
|  | ||||
| __all__ = ('BaseField', 'ComplexBaseField', 'ObjectIdField', | ||||
|            'GeoJsonBaseField') | ||||
|  | ||||
|  | ||||
| class BaseField(object): | ||||
| @@ -21,7 +23,6 @@ class BaseField(object): | ||||
|  | ||||
|     .. versionchanged:: 0.5 - added verbose and help text | ||||
|     """ | ||||
|  | ||||
|     name = None | ||||
|     _geo_index = False | ||||
|     _auto_gen = False  # Call `generate` to generate a value | ||||
| @@ -33,21 +34,10 @@ class BaseField(object): | ||||
|     creation_counter = 0 | ||||
|     auto_creation_counter = -1 | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         db_field=None, | ||||
|         name=None, | ||||
|         required=False, | ||||
|         default=None, | ||||
|         unique=False, | ||||
|         unique_with=None, | ||||
|         primary_key=False, | ||||
|         validation=None, | ||||
|         choices=None, | ||||
|         null=False, | ||||
|         sparse=False, | ||||
|         **kwargs | ||||
|     ): | ||||
|     def __init__(self, db_field=None, name=None, required=False, default=None, | ||||
|                  unique=False, unique_with=None, primary_key=False, | ||||
|                  validation=None, choices=None, null=False, sparse=False, | ||||
|                  **kwargs): | ||||
|         """ | ||||
|         :param db_field: The database field to store this field in | ||||
|             (defaults to the name of the field) | ||||
| @@ -62,10 +52,10 @@ class BaseField(object): | ||||
|             unique with. | ||||
|         :param primary_key: Mark this field as the primary key. Defaults to False. | ||||
|         :param validation: (optional) A callable to validate the value of the | ||||
|             field.  The callable takes the value as parameter and should raise | ||||
|             a ValidationError if validation fails | ||||
|             field.  Generally this is deprecated in favour of the | ||||
|             `FIELD.validate` method | ||||
|         :param choices: (optional) The valid choices | ||||
|         :param null: (optional) If the field value can be null. If no and there is a default value | ||||
|         :param null: (optional) Is the field value can be null. If no and there is a default value | ||||
|             then the default value is set | ||||
|         :param sparse: (optional) `sparse=True` combined with `unique=True` and `required=False` | ||||
|             means that uniqueness won't be enforced for `None` values | ||||
| @@ -75,7 +65,7 @@ class BaseField(object): | ||||
|             existing attributes. Common metadata includes `verbose_name` and | ||||
|             `help_text`. | ||||
|         """ | ||||
|         self.db_field = (db_field or name) if not primary_key else "_id" | ||||
|         self.db_field = (db_field or name) if not primary_key else '_id' | ||||
|  | ||||
|         if name: | ||||
|             msg = 'Field\'s "name" attribute deprecated in favour of "db_field"' | ||||
| @@ -92,16 +82,17 @@ class BaseField(object): | ||||
|         self._owner_document = None | ||||
|  | ||||
|         # Make sure db_field is a string (if it's explicitly defined). | ||||
|         if self.db_field is not None and not isinstance( | ||||
|             self.db_field, six.string_types | ||||
|         if ( | ||||
|             self.db_field is not None and | ||||
|             not isinstance(self.db_field, six.string_types) | ||||
|         ): | ||||
|             raise TypeError("db_field should be a string.") | ||||
|             raise TypeError('db_field should be a string.') | ||||
|  | ||||
|         # Make sure db_field doesn't contain any forbidden characters. | ||||
|         if isinstance(self.db_field, six.string_types) and ( | ||||
|             "." in self.db_field | ||||
|             or "\0" in self.db_field | ||||
|             or self.db_field.startswith("$") | ||||
|             '.' in self.db_field or | ||||
|             '\0' in self.db_field or | ||||
|             self.db_field.startswith('$') | ||||
|         ): | ||||
|             raise ValueError( | ||||
|                 'field names cannot contain dots (".") or null characters ' | ||||
| @@ -111,17 +102,15 @@ class BaseField(object): | ||||
|         # Detect and report conflicts between metadata and base properties. | ||||
|         conflicts = set(dir(self)) & set(kwargs) | ||||
|         if conflicts: | ||||
|             raise TypeError( | ||||
|                 "%s already has attribute(s): %s" | ||||
|                 % (self.__class__.__name__, ", ".join(conflicts)) | ||||
|             ) | ||||
|             raise TypeError('%s already has attribute(s): %s' % ( | ||||
|                 self.__class__.__name__, ', '.join(conflicts))) | ||||
|  | ||||
|         # Assign metadata to the instance | ||||
|         # This efficient method is available because no __slots__ are defined. | ||||
|         self.__dict__.update(kwargs) | ||||
|  | ||||
|         # Adjust the appropriate creation counter, and save our local copy. | ||||
|         if self.db_field == "_id": | ||||
|         if self.db_field == '_id': | ||||
|             self.creation_counter = BaseField.auto_creation_counter | ||||
|             BaseField.auto_creation_counter -= 1 | ||||
|         else: | ||||
| @@ -139,9 +128,11 @@ class BaseField(object): | ||||
|         return instance._data.get(self.name) | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|         """Descriptor for assigning a value to a field in a document.""" | ||||
|         # If setting to None and there is a default value provided for this | ||||
|         # field, then set the value to the default value. | ||||
|         """Descriptor for assigning a value to a field in a document. | ||||
|         """ | ||||
|  | ||||
|         # If setting to None and there is a default | ||||
|         # Then set the value to the default value | ||||
|         if value is None: | ||||
|             if self.null: | ||||
|                 value = None | ||||
| @@ -152,29 +143,24 @@ class BaseField(object): | ||||
|  | ||||
|         if instance._initialised: | ||||
|             try: | ||||
|                 value_has_changed = ( | ||||
|                     self.name not in instance._data | ||||
|                     or instance._data[self.name] != value | ||||
|                 ) | ||||
|                 if value_has_changed: | ||||
|                 if (self.name not in instance._data or | ||||
|                         instance._data[self.name] != value): | ||||
|                     instance._mark_as_changed(self.name) | ||||
|             except Exception: | ||||
|                 # Some values can't be compared and throw an error when we | ||||
|                 # attempt to do so (e.g. tz-naive and tz-aware datetimes). | ||||
|                 # Mark the field as changed in such cases. | ||||
|                 # Values cant be compared eg: naive and tz datetimes | ||||
|                 # So mark it as changed | ||||
|                 instance._mark_as_changed(self.name) | ||||
|  | ||||
|         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|         if isinstance(value, EmbeddedDocument): | ||||
|             value._instance = weakref.proxy(instance) | ||||
|         elif isinstance(value, (list, tuple)): | ||||
|             for v in value: | ||||
|                 if isinstance(v, EmbeddedDocument): | ||||
|                     v._instance = weakref.proxy(instance) | ||||
|  | ||||
|         instance._data[self.name] = value | ||||
|  | ||||
|     def error(self, message="", errors=None, field_name=None): | ||||
|     def error(self, message='', errors=None, field_name=None): | ||||
|         """Raise a ValidationError.""" | ||||
|         field_name = field_name if field_name else self.name | ||||
|         raise ValidationError(message, errors=errors, field_name=field_name) | ||||
| @@ -191,11 +177,11 @@ class BaseField(object): | ||||
|         """Helper method to call to_mongo with proper inputs.""" | ||||
|         f_inputs = self.to_mongo.__code__.co_varnames | ||||
|         ex_vars = {} | ||||
|         if "fields" in f_inputs: | ||||
|             ex_vars["fields"] = fields | ||||
|         if 'fields' in f_inputs: | ||||
|             ex_vars['fields'] = fields | ||||
|  | ||||
|         if "use_db_field" in f_inputs: | ||||
|             ex_vars["use_db_field"] = use_db_field | ||||
|         if 'use_db_field' in f_inputs: | ||||
|             ex_vars['use_db_field'] = use_db_field | ||||
|  | ||||
|         return self.to_mongo(value, **ex_vars) | ||||
|  | ||||
| @@ -210,8 +196,8 @@ class BaseField(object): | ||||
|         pass | ||||
|  | ||||
|     def _validate_choices(self, value): | ||||
|         Document = _import_class("Document") | ||||
|         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||
|         Document = _import_class('Document') | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|  | ||||
|         choice_list = self.choices | ||||
|         if isinstance(next(iter(choice_list)), (list, tuple)): | ||||
| @@ -222,13 +208,13 @@ class BaseField(object): | ||||
|         if isinstance(value, (Document, EmbeddedDocument)): | ||||
|             if not any(isinstance(value, c) for c in choice_list): | ||||
|                 self.error( | ||||
|                     "Value must be an instance of %s" % (six.text_type(choice_list)) | ||||
|                     'Value must be an instance of %s' % ( | ||||
|                         six.text_type(choice_list) | ||||
|                     ) | ||||
|                 ) | ||||
|         # Choices which are types other than Documents | ||||
|         else: | ||||
|             values = value if isinstance(value, (list, tuple)) else [value] | ||||
|             if len(set(values) - set(choice_list)): | ||||
|                 self.error("Value must be one of %s" % six.text_type(choice_list)) | ||||
|         elif value not in choice_list: | ||||
|             self.error('Value must be one of %s' % six.text_type(choice_list)) | ||||
|  | ||||
|     def _validate(self, value, **kwargs): | ||||
|         # Check the Choices Constraint | ||||
| @@ -238,23 +224,11 @@ class BaseField(object): | ||||
|         # check validation argument | ||||
|         if self.validation is not None: | ||||
|             if callable(self.validation): | ||||
|                 try: | ||||
|                     # breaking change of 0.18 | ||||
|                     # Get rid of True/False-type return for the validation method | ||||
|                     # in favor of having validation raising a ValidationError | ||||
|                     ret = self.validation(value) | ||||
|                     if ret is not None: | ||||
|                         raise DeprecatedError( | ||||
|                             "validation argument for `%s` must not return anything, " | ||||
|                             "it should raise a ValidationError if validation fails" | ||||
|                             % self.name | ||||
|                         ) | ||||
|                 except ValidationError as ex: | ||||
|                     self.error(str(ex)) | ||||
|                 if not self.validation(value): | ||||
|                     self.error('Value does not match custom validation method') | ||||
|             else: | ||||
|                 raise ValueError( | ||||
|                     'validation argument for `"%s"` must be a ' "callable." % self.name | ||||
|                 ) | ||||
|                 raise ValueError('validation argument for "%s" must be a ' | ||||
|                                  'callable.' % self.name) | ||||
|  | ||||
|         self.validate(value, **kwargs) | ||||
|  | ||||
| @@ -288,41 +262,28 @@ class ComplexBaseField(BaseField): | ||||
|             # Document class being used rather than a document object | ||||
|             return self | ||||
|  | ||||
|         ReferenceField = _import_class("ReferenceField") | ||||
|         GenericReferenceField = _import_class("GenericReferenceField") | ||||
|         EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField") | ||||
|         ReferenceField = _import_class('ReferenceField') | ||||
|         GenericReferenceField = _import_class('GenericReferenceField') | ||||
|         EmbeddedDocumentListField = _import_class('EmbeddedDocumentListField') | ||||
|         dereference = (self._auto_dereference and | ||||
|                        (self.field is None or isinstance(self.field, | ||||
|                                                          (GenericReferenceField, ReferenceField)))) | ||||
|  | ||||
|         auto_dereference = instance._fields[self.name]._auto_dereference | ||||
|         _dereference = _import_class('DeReference')() | ||||
|  | ||||
|         dereference = auto_dereference and ( | ||||
|             self.field is None | ||||
|             or isinstance(self.field, (GenericReferenceField, ReferenceField)) | ||||
|         ) | ||||
|  | ||||
|         _dereference = _import_class("DeReference")() | ||||
|  | ||||
|         if ( | ||||
|             instance._initialised | ||||
|             and dereference | ||||
|             and instance._data.get(self.name) | ||||
|             and not getattr(instance._data[self.name], "_dereferenced", False) | ||||
|         ): | ||||
|         self._auto_dereference = instance._fields[self.name]._auto_dereference | ||||
|         if instance._initialised and dereference and instance._data.get(self.name): | ||||
|             instance._data[self.name] = _dereference( | ||||
|                 instance._data.get(self.name), | ||||
|                 max_depth=1, | ||||
|                 instance=instance, | ||||
|                 name=self.name, | ||||
|                 instance._data.get(self.name), max_depth=1, instance=instance, | ||||
|                 name=self.name | ||||
|             ) | ||||
|             if hasattr(instance._data[self.name], "_dereferenced"): | ||||
|                 instance._data[self.name]._dereferenced = True | ||||
|  | ||||
|         value = super(ComplexBaseField, self).__get__(instance, owner) | ||||
|  | ||||
|         # Convert lists / values so we can watch for any changes on them | ||||
|         if isinstance(value, (list, tuple)): | ||||
|             if issubclass(type(self), EmbeddedDocumentListField) and not isinstance( | ||||
|                 value, EmbeddedDocumentList | ||||
|             ): | ||||
|             if (issubclass(type(self), EmbeddedDocumentListField) and | ||||
|                     not isinstance(value, EmbeddedDocumentList)): | ||||
|                 value = EmbeddedDocumentList(value, instance, self.name) | ||||
|             elif not isinstance(value, BaseList): | ||||
|                 value = BaseList(value, instance, self.name) | ||||
| @@ -331,13 +292,12 @@ class ComplexBaseField(BaseField): | ||||
|             value = BaseDict(value, instance, self.name) | ||||
|             instance._data[self.name] = value | ||||
|  | ||||
|         if ( | ||||
|             auto_dereference | ||||
|             and instance._initialised | ||||
|             and isinstance(value, (BaseList, BaseDict)) | ||||
|             and not value._dereferenced | ||||
|         ): | ||||
|             value = _dereference(value, max_depth=1, instance=instance, name=self.name) | ||||
|         if (self._auto_dereference and instance._initialised and | ||||
|                 isinstance(value, (BaseList, BaseDict)) and | ||||
|                 not value._dereferenced): | ||||
|             value = _dereference( | ||||
|                 value, max_depth=1, instance=instance, name=self.name | ||||
|             ) | ||||
|             value._dereferenced = True | ||||
|             instance._data[self.name] = value | ||||
|  | ||||
| @@ -348,72 +308,63 @@ class ComplexBaseField(BaseField): | ||||
|         if isinstance(value, six.string_types): | ||||
|             return value | ||||
|  | ||||
|         if hasattr(value, "to_python"): | ||||
|         if hasattr(value, 'to_python'): | ||||
|             return value.to_python() | ||||
|  | ||||
|         BaseDocument = _import_class("BaseDocument") | ||||
|         if isinstance(value, BaseDocument): | ||||
|             # Something is wrong, return the value as it is | ||||
|             return value | ||||
|  | ||||
|         is_list = False | ||||
|         if not hasattr(value, "items"): | ||||
|         if not hasattr(value, 'items'): | ||||
|             try: | ||||
|                 is_list = True | ||||
|                 value = {idx: v for idx, v in enumerate(value)} | ||||
|                 value = {k: v for k, v in enumerate(value)} | ||||
|             except TypeError:  # Not iterable return the value | ||||
|                 return value | ||||
|  | ||||
|         if self.field: | ||||
|             self.field._auto_dereference = self._auto_dereference | ||||
|             value_dict = { | ||||
|                 key: self.field.to_python(item) for key, item in value.items() | ||||
|             } | ||||
|             value_dict = {key: self.field.to_python(item) | ||||
|                           for key, item in value.items()} | ||||
|         else: | ||||
|             Document = _import_class("Document") | ||||
|             Document = _import_class('Document') | ||||
|             value_dict = {} | ||||
|             for k, v in value.items(): | ||||
|                 if isinstance(v, Document): | ||||
|                     # We need the id from the saved object to create the DBRef | ||||
|                     if v.pk is None: | ||||
|                         self.error( | ||||
|                             "You can only reference documents once they" | ||||
|                             " have been saved to the database" | ||||
|                         ) | ||||
|                         self.error('You can only reference documents once they' | ||||
|                                    ' have been saved to the database') | ||||
|                     collection = v._get_collection_name() | ||||
|                     value_dict[k] = DBRef(collection, v.pk) | ||||
|                 elif hasattr(v, "to_python"): | ||||
|                 elif hasattr(v, 'to_python'): | ||||
|                     value_dict[k] = v.to_python() | ||||
|                 else: | ||||
|                     value_dict[k] = self.to_python(v) | ||||
|  | ||||
|         if is_list:  # Convert back to a list | ||||
|             return [ | ||||
|                 v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0)) | ||||
|             ] | ||||
|             return [v for _, v in sorted(value_dict.items(), | ||||
|                                          key=operator.itemgetter(0))] | ||||
|         return value_dict | ||||
|  | ||||
|     def to_mongo(self, value, use_db_field=True, fields=None): | ||||
|         """Convert a Python type to a MongoDB-compatible type.""" | ||||
|         Document = _import_class("Document") | ||||
|         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||
|         GenericReferenceField = _import_class("GenericReferenceField") | ||||
|         Document = _import_class('Document') | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|         GenericReferenceField = _import_class('GenericReferenceField') | ||||
|  | ||||
|         if isinstance(value, six.string_types): | ||||
|             return value | ||||
|  | ||||
|         if hasattr(value, "to_mongo"): | ||||
|         if hasattr(value, 'to_mongo'): | ||||
|             if isinstance(value, Document): | ||||
|                 return GenericReferenceField().to_mongo(value) | ||||
|             cls = value.__class__ | ||||
|             val = value.to_mongo(use_db_field, fields) | ||||
|             # If it's a document that is not inherited add _cls | ||||
|             if isinstance(value, EmbeddedDocument): | ||||
|                 val["_cls"] = cls.__name__ | ||||
|                 val['_cls'] = cls.__name__ | ||||
|             return val | ||||
|  | ||||
|         is_list = False | ||||
|         if not hasattr(value, "items"): | ||||
|         if not hasattr(value, 'items'): | ||||
|             try: | ||||
|                 is_list = True | ||||
|                 value = {k: v for k, v in enumerate(value)} | ||||
| @@ -423,51 +374,48 @@ class ComplexBaseField(BaseField): | ||||
|         if self.field: | ||||
|             value_dict = { | ||||
|                 key: self.field._to_mongo_safe_call(item, use_db_field, fields) | ||||
|                 for key, item in iteritems(value) | ||||
|                 for key, item in value.iteritems() | ||||
|             } | ||||
|         else: | ||||
|             value_dict = {} | ||||
|             for k, v in iteritems(value): | ||||
|             for k, v in value.iteritems(): | ||||
|                 if isinstance(v, Document): | ||||
|                     # We need the id from the saved object to create the DBRef | ||||
|                     if v.pk is None: | ||||
|                         self.error( | ||||
|                             "You can only reference documents once they" | ||||
|                             " have been saved to the database" | ||||
|                         ) | ||||
|                         self.error('You can only reference documents once they' | ||||
|                                    ' have been saved to the database') | ||||
|  | ||||
|                     # If its a document that is not inheritable it won't have | ||||
|                     # any _cls data so make it a generic reference allows | ||||
|                     # us to dereference | ||||
|                     meta = getattr(v, "_meta", {}) | ||||
|                     allow_inheritance = meta.get("allow_inheritance") | ||||
|                     meta = getattr(v, '_meta', {}) | ||||
|                     allow_inheritance = meta.get('allow_inheritance') | ||||
|                     if not allow_inheritance and not self.field: | ||||
|                         value_dict[k] = GenericReferenceField().to_mongo(v) | ||||
|                     else: | ||||
|                         collection = v._get_collection_name() | ||||
|                         value_dict[k] = DBRef(collection, v.pk) | ||||
|                 elif hasattr(v, "to_mongo"): | ||||
|                 elif hasattr(v, 'to_mongo'): | ||||
|                     cls = v.__class__ | ||||
|                     val = v.to_mongo(use_db_field, fields) | ||||
|                     # If it's a document that is not inherited add _cls | ||||
|                     if isinstance(v, (Document, EmbeddedDocument)): | ||||
|                         val["_cls"] = cls.__name__ | ||||
|                         val['_cls'] = cls.__name__ | ||||
|                     value_dict[k] = val | ||||
|                 else: | ||||
|                     value_dict[k] = self.to_mongo(v, use_db_field, fields) | ||||
|  | ||||
|         if is_list:  # Convert back to a list | ||||
|             return [ | ||||
|                 v for _, v in sorted(value_dict.items(), key=operator.itemgetter(0)) | ||||
|             ] | ||||
|             return [v for _, v in sorted(value_dict.items(), | ||||
|                                          key=operator.itemgetter(0))] | ||||
|         return value_dict | ||||
|  | ||||
|     def validate(self, value): | ||||
|         """If field is provided ensure the value is valid.""" | ||||
|         errors = {} | ||||
|         if self.field: | ||||
|             if hasattr(value, "iteritems") or hasattr(value, "items"): | ||||
|                 sequence = iteritems(value) | ||||
|             if hasattr(value, 'iteritems') or hasattr(value, 'items'): | ||||
|                 sequence = value.iteritems() | ||||
|             else: | ||||
|                 sequence = enumerate(value) | ||||
|             for k, v in sequence: | ||||
| @@ -480,10 +428,11 @@ class ComplexBaseField(BaseField): | ||||
|  | ||||
|             if errors: | ||||
|                 field_class = self.field.__class__.__name__ | ||||
|                 self.error("Invalid %s item (%s)" % (field_class, value), errors=errors) | ||||
|                 self.error('Invalid %s item (%s)' % (field_class, value), | ||||
|                            errors=errors) | ||||
|         # Don't allow empty values if required | ||||
|         if self.required and not value: | ||||
|             self.error("Field is required and cannot be empty") | ||||
|             self.error('Field is required and cannot be empty') | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         return self.to_mongo(value) | ||||
| @@ -526,7 +475,7 @@ class ObjectIdField(BaseField): | ||||
|         try: | ||||
|             ObjectId(six.text_type(value)) | ||||
|         except Exception: | ||||
|             self.error("Invalid Object ID") | ||||
|             self.error('Invalid Object ID') | ||||
|  | ||||
|  | ||||
| class GeoJsonBaseField(BaseField): | ||||
| @@ -536,14 +485,14 @@ class GeoJsonBaseField(BaseField): | ||||
|     """ | ||||
|  | ||||
|     _geo_index = pymongo.GEOSPHERE | ||||
|     _type = "GeoBase" | ||||
|     _type = 'GeoBase' | ||||
|  | ||||
|     def __init__(self, auto_index=True, *args, **kwargs): | ||||
|         """ | ||||
|         :param bool auto_index: Automatically create a '2dsphere' index.\ | ||||
|             Defaults to `True`. | ||||
|         """ | ||||
|         self._name = "%sField" % self._type | ||||
|         self._name = '%sField' % self._type | ||||
|         if not auto_index: | ||||
|             self._geo_index = False | ||||
|         super(GeoJsonBaseField, self).__init__(*args, **kwargs) | ||||
| @@ -551,58 +500,57 @@ class GeoJsonBaseField(BaseField): | ||||
|     def validate(self, value): | ||||
|         """Validate the GeoJson object based on its type.""" | ||||
|         if isinstance(value, dict): | ||||
|             if set(value.keys()) == {"type", "coordinates"}: | ||||
|                 if value["type"] != self._type: | ||||
|                     self.error('%s type must be "%s"' % (self._name, self._type)) | ||||
|                 return self.validate(value["coordinates"]) | ||||
|             if set(value.keys()) == set(['type', 'coordinates']): | ||||
|                 if value['type'] != self._type: | ||||
|                     self.error('%s type must be "%s"' % | ||||
|                                (self._name, self._type)) | ||||
|                 return self.validate(value['coordinates']) | ||||
|             else: | ||||
|                 self.error( | ||||
|                     "%s can only accept a valid GeoJson dictionary" | ||||
|                     " or lists of (x, y)" % self._name | ||||
|                 ) | ||||
|                 self.error('%s can only accept a valid GeoJson dictionary' | ||||
|                            ' or lists of (x, y)' % self._name) | ||||
|                 return | ||||
|         elif not isinstance(value, (list, tuple)): | ||||
|             self.error("%s can only accept lists of [x, y]" % self._name) | ||||
|             self.error('%s can only accept lists of [x, y]' % self._name) | ||||
|             return | ||||
|  | ||||
|         validate = getattr(self, "_validate_%s" % self._type.lower()) | ||||
|         validate = getattr(self, '_validate_%s' % self._type.lower()) | ||||
|         error = validate(value) | ||||
|         if error: | ||||
|             self.error(error) | ||||
|  | ||||
|     def _validate_polygon(self, value, top_level=True): | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             return "Polygons must contain list of linestrings" | ||||
|             return 'Polygons must contain list of linestrings' | ||||
|  | ||||
|         # Quick and dirty validator | ||||
|         try: | ||||
|             value[0][0][0] | ||||
|         except (TypeError, IndexError): | ||||
|             return "Invalid Polygon must contain at least one valid linestring" | ||||
|             return 'Invalid Polygon must contain at least one valid linestring' | ||||
|  | ||||
|         errors = [] | ||||
|         for val in value: | ||||
|             error = self._validate_linestring(val, False) | ||||
|             if not error and val[0] != val[-1]: | ||||
|                 error = "LineStrings must start and end at the same point" | ||||
|                 error = 'LineStrings must start and end at the same point' | ||||
|             if error and error not in errors: | ||||
|                 errors.append(error) | ||||
|         if errors: | ||||
|             if top_level: | ||||
|                 return "Invalid Polygon:\n%s" % ", ".join(errors) | ||||
|                 return 'Invalid Polygon:\n%s' % ', '.join(errors) | ||||
|             else: | ||||
|                 return "%s" % ", ".join(errors) | ||||
|                 return '%s' % ', '.join(errors) | ||||
|  | ||||
|     def _validate_linestring(self, value, top_level=True): | ||||
|         """Validate a linestring.""" | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             return "LineStrings must contain list of coordinate pairs" | ||||
|             return 'LineStrings must contain list of coordinate pairs' | ||||
|  | ||||
|         # Quick and dirty validator | ||||
|         try: | ||||
|             value[0][0] | ||||
|         except (TypeError, IndexError): | ||||
|             return "Invalid LineString must contain at least one valid point" | ||||
|             return 'Invalid LineString must contain at least one valid point' | ||||
|  | ||||
|         errors = [] | ||||
|         for val in value: | ||||
| @@ -611,30 +559,29 @@ class GeoJsonBaseField(BaseField): | ||||
|                 errors.append(error) | ||||
|         if errors: | ||||
|             if top_level: | ||||
|                 return "Invalid LineString:\n%s" % ", ".join(errors) | ||||
|                 return 'Invalid LineString:\n%s' % ', '.join(errors) | ||||
|             else: | ||||
|                 return "%s" % ", ".join(errors) | ||||
|                 return '%s' % ', '.join(errors) | ||||
|  | ||||
|     def _validate_point(self, value): | ||||
|         """Validate each set of coords""" | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             return "Points must be a list of coordinate pairs" | ||||
|             return 'Points must be a list of coordinate pairs' | ||||
|         elif not len(value) == 2: | ||||
|             return "Value (%s) must be a two-dimensional point" % repr(value) | ||||
|         elif not isinstance(value[0], (float, int)) or not isinstance( | ||||
|             value[1], (float, int) | ||||
|         ): | ||||
|             return "Both values (%s) in point must be float or int" % repr(value) | ||||
|             return 'Value (%s) must be a two-dimensional point' % repr(value) | ||||
|         elif (not isinstance(value[0], (float, int)) or | ||||
|               not isinstance(value[1], (float, int))): | ||||
|             return 'Both values (%s) in point must be float or int' % repr(value) | ||||
|  | ||||
|     def _validate_multipoint(self, value): | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             return "MultiPoint must be a list of Point" | ||||
|             return 'MultiPoint must be a list of Point' | ||||
|  | ||||
|         # Quick and dirty validator | ||||
|         try: | ||||
|             value[0][0] | ||||
|         except (TypeError, IndexError): | ||||
|             return "Invalid MultiPoint must contain at least one valid point" | ||||
|             return 'Invalid MultiPoint must contain at least one valid point' | ||||
|  | ||||
|         errors = [] | ||||
|         for point in value: | ||||
| @@ -643,17 +590,17 @@ class GeoJsonBaseField(BaseField): | ||||
|                 errors.append(error) | ||||
|  | ||||
|         if errors: | ||||
|             return "%s" % ", ".join(errors) | ||||
|             return '%s' % ', '.join(errors) | ||||
|  | ||||
|     def _validate_multilinestring(self, value, top_level=True): | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             return "MultiLineString must be a list of LineString" | ||||
|             return 'MultiLineString must be a list of LineString' | ||||
|  | ||||
|         # Quick and dirty validator | ||||
|         try: | ||||
|             value[0][0][0] | ||||
|         except (TypeError, IndexError): | ||||
|             return "Invalid MultiLineString must contain at least one valid linestring" | ||||
|             return 'Invalid MultiLineString must contain at least one valid linestring' | ||||
|  | ||||
|         errors = [] | ||||
|         for linestring in value: | ||||
| @@ -663,19 +610,19 @@ class GeoJsonBaseField(BaseField): | ||||
|  | ||||
|         if errors: | ||||
|             if top_level: | ||||
|                 return "Invalid MultiLineString:\n%s" % ", ".join(errors) | ||||
|                 return 'Invalid MultiLineString:\n%s' % ', '.join(errors) | ||||
|             else: | ||||
|                 return "%s" % ", ".join(errors) | ||||
|                 return '%s' % ', '.join(errors) | ||||
|  | ||||
|     def _validate_multipolygon(self, value): | ||||
|         if not isinstance(value, (list, tuple)): | ||||
|             return "MultiPolygon must be a list of Polygon" | ||||
|             return 'MultiPolygon must be a list of Polygon' | ||||
|  | ||||
|         # Quick and dirty validator | ||||
|         try: | ||||
|             value[0][0][0][0] | ||||
|         except (TypeError, IndexError): | ||||
|             return "Invalid MultiPolygon must contain at least one valid Polygon" | ||||
|             return 'Invalid MultiPolygon must contain at least one valid Polygon' | ||||
|  | ||||
|         errors = [] | ||||
|         for polygon in value: | ||||
| @@ -684,9 +631,9 @@ class GeoJsonBaseField(BaseField): | ||||
|                 errors.append(error) | ||||
|  | ||||
|         if errors: | ||||
|             return "Invalid MultiPolygon:\n%s" % ", ".join(errors) | ||||
|             return 'Invalid MultiPolygon:\n%s' % ', '.join(errors) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         if isinstance(value, dict): | ||||
|             return value | ||||
|         return SON([("type", self._type), ("coordinates", value)]) | ||||
|         return SON([('type', self._type), ('coordinates', value)]) | ||||
|   | ||||
| @@ -1,75 +1,68 @@ | ||||
| import itertools | ||||
| import warnings | ||||
|  | ||||
| import six | ||||
| from six import iteritems, itervalues | ||||
|  | ||||
| from mongoengine.base.common import _document_registry | ||||
| from mongoengine.base.fields import BaseField, ComplexBaseField, ObjectIdField | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.errors import InvalidDocumentError | ||||
| from mongoengine.queryset import ( | ||||
|     DO_NOTHING, | ||||
|     DoesNotExist, | ||||
|     MultipleObjectsReturned, | ||||
|     QuerySetManager, | ||||
| ) | ||||
| from mongoengine.queryset import (DO_NOTHING, DoesNotExist, | ||||
|                                   MultipleObjectsReturned, | ||||
|                                   QuerySetManager) | ||||
|  | ||||
|  | ||||
| __all__ = ("DocumentMetaclass", "TopLevelDocumentMetaclass") | ||||
| __all__ = ('DocumentMetaclass', 'TopLevelDocumentMetaclass') | ||||
|  | ||||
|  | ||||
| class DocumentMetaclass(type): | ||||
|     """Metaclass for all documents.""" | ||||
|  | ||||
|     # TODO lower complexity of this method | ||||
|     def __new__(mcs, name, bases, attrs): | ||||
|         flattened_bases = mcs._get_bases(bases) | ||||
|         super_new = super(DocumentMetaclass, mcs).__new__ | ||||
|     def __new__(cls, name, bases, attrs): | ||||
|         flattened_bases = cls._get_bases(bases) | ||||
|         super_new = super(DocumentMetaclass, cls).__new__ | ||||
|  | ||||
|         # If a base class just call super | ||||
|         metaclass = attrs.get("my_metaclass") | ||||
|         metaclass = attrs.get('my_metaclass') | ||||
|         if metaclass and issubclass(metaclass, DocumentMetaclass): | ||||
|             return super_new(mcs, name, bases, attrs) | ||||
|             return super_new(cls, name, bases, attrs) | ||||
|  | ||||
|         attrs["_is_document"] = attrs.get("_is_document", False) | ||||
|         attrs["_cached_reference_fields"] = [] | ||||
|         attrs['_is_document'] = attrs.get('_is_document', False) | ||||
|         attrs['_cached_reference_fields'] = [] | ||||
|  | ||||
|         # EmbeddedDocuments could have meta data for inheritance | ||||
|         if "meta" in attrs: | ||||
|             attrs["_meta"] = attrs.pop("meta") | ||||
|         if 'meta' in attrs: | ||||
|             attrs['_meta'] = attrs.pop('meta') | ||||
|  | ||||
|         # EmbeddedDocuments should inherit meta data | ||||
|         if "_meta" not in attrs: | ||||
|         if '_meta' not in attrs: | ||||
|             meta = MetaDict() | ||||
|             for base in flattened_bases[::-1]: | ||||
|                 # Add any mixin metadata from plain objects | ||||
|                 if hasattr(base, "meta"): | ||||
|                 if hasattr(base, 'meta'): | ||||
|                     meta.merge(base.meta) | ||||
|                 elif hasattr(base, "_meta"): | ||||
|                 elif hasattr(base, '_meta'): | ||||
|                     meta.merge(base._meta) | ||||
|             attrs["_meta"] = meta | ||||
|             attrs["_meta"][ | ||||
|                 "abstract" | ||||
|             ] = False  # 789: EmbeddedDocument shouldn't inherit abstract | ||||
|             attrs['_meta'] = meta | ||||
|             attrs['_meta']['abstract'] = False  # 789: EmbeddedDocument shouldn't inherit abstract | ||||
|  | ||||
|         # If allow_inheritance is True, add a "_cls" string field to the attrs | ||||
|         if attrs["_meta"].get("allow_inheritance"): | ||||
|             StringField = _import_class("StringField") | ||||
|             attrs["_cls"] = StringField() | ||||
|         if attrs['_meta'].get('allow_inheritance'): | ||||
|             StringField = _import_class('StringField') | ||||
|             attrs['_cls'] = StringField() | ||||
|  | ||||
|         # Handle document Fields | ||||
|  | ||||
|         # Merge all fields from subclasses | ||||
|         doc_fields = {} | ||||
|         for base in flattened_bases[::-1]: | ||||
|             if hasattr(base, "_fields"): | ||||
|             if hasattr(base, '_fields'): | ||||
|                 doc_fields.update(base._fields) | ||||
|  | ||||
|             # Standard object mixin - merge in any Fields | ||||
|             if not hasattr(base, "_meta"): | ||||
|             if not hasattr(base, '_meta'): | ||||
|                 base_fields = {} | ||||
|                 for attr_name, attr_value in iteritems(base.__dict__): | ||||
|                 for attr_name, attr_value in base.__dict__.iteritems(): | ||||
|                     if not isinstance(attr_value, BaseField): | ||||
|                         continue | ||||
|                     attr_value.name = attr_name | ||||
| @@ -81,7 +74,7 @@ class DocumentMetaclass(type): | ||||
|  | ||||
|         # Discover any document fields | ||||
|         field_names = {} | ||||
|         for attr_name, attr_value in iteritems(attrs): | ||||
|         for attr_name, attr_value in attrs.iteritems(): | ||||
|             if not isinstance(attr_value, BaseField): | ||||
|                 continue | ||||
|             attr_value.name = attr_name | ||||
| @@ -90,31 +83,27 @@ class DocumentMetaclass(type): | ||||
|             doc_fields[attr_name] = attr_value | ||||
|  | ||||
|             # Count names to ensure no db_field redefinitions | ||||
|             field_names[attr_value.db_field] = ( | ||||
|                 field_names.get(attr_value.db_field, 0) + 1 | ||||
|             ) | ||||
|             field_names[attr_value.db_field] = field_names.get( | ||||
|                 attr_value.db_field, 0) + 1 | ||||
|  | ||||
|         # Ensure no duplicate db_fields | ||||
|         duplicate_db_fields = [k for k, v in field_names.items() if v > 1] | ||||
|         if duplicate_db_fields: | ||||
|             msg = "Multiple db_fields defined for: %s " % ", ".join(duplicate_db_fields) | ||||
|             msg = ('Multiple db_fields defined for: %s ' % | ||||
|                    ', '.join(duplicate_db_fields)) | ||||
|             raise InvalidDocumentError(msg) | ||||
|  | ||||
|         # Set _fields and db_field maps | ||||
|         attrs["_fields"] = doc_fields | ||||
|         attrs["_db_field_map"] = { | ||||
|             k: getattr(v, "db_field", k) for k, v in doc_fields.items() | ||||
|         } | ||||
|         attrs["_reverse_db_field_map"] = { | ||||
|             v: k for k, v in attrs["_db_field_map"].items() | ||||
|         attrs['_fields'] = doc_fields | ||||
|         attrs['_db_field_map'] = {k: getattr(v, 'db_field', k) | ||||
|                                   for k, v in doc_fields.items()} | ||||
|         attrs['_reverse_db_field_map'] = { | ||||
|             v: k for k, v in attrs['_db_field_map'].items() | ||||
|         } | ||||
|  | ||||
|         attrs["_fields_ordered"] = tuple( | ||||
|             i[1] | ||||
|             for i in sorted( | ||||
|                 (v.creation_counter, v.name) for v in itervalues(doc_fields) | ||||
|             ) | ||||
|         ) | ||||
|         attrs['_fields_ordered'] = tuple(i[1] for i in sorted( | ||||
|                                          (v.creation_counter, v.name) | ||||
|                                          for v in doc_fields.itervalues())) | ||||
|  | ||||
|         # | ||||
|         # Set document hierarchy | ||||
| @@ -122,37 +111,34 @@ class DocumentMetaclass(type): | ||||
|         superclasses = () | ||||
|         class_name = [name] | ||||
|         for base in flattened_bases: | ||||
|             if not getattr(base, "_is_base_cls", True) and not getattr( | ||||
|                 base, "_meta", {} | ||||
|             ).get("abstract", True): | ||||
|             if (not getattr(base, '_is_base_cls', True) and | ||||
|                     not getattr(base, '_meta', {}).get('abstract', True)): | ||||
|                 # Collate hierarchy for _cls and _subclasses | ||||
|                 class_name.append(base.__name__) | ||||
|  | ||||
|             if hasattr(base, "_meta"): | ||||
|             if hasattr(base, '_meta'): | ||||
|                 # Warn if allow_inheritance isn't set and prevent | ||||
|                 # inheritance of classes where inheritance is set to False | ||||
|                 allow_inheritance = base._meta.get("allow_inheritance") | ||||
|                 if not allow_inheritance and not base._meta.get("abstract"): | ||||
|                     raise ValueError( | ||||
|                         "Document %s may not be subclassed. " | ||||
|                         'To enable inheritance, use the "allow_inheritance" meta attribute.' | ||||
|                         % base.__name__ | ||||
|                     ) | ||||
|                 allow_inheritance = base._meta.get('allow_inheritance') | ||||
|                 if not allow_inheritance and not base._meta.get('abstract'): | ||||
|                     raise ValueError('Document %s may not be subclassed' % | ||||
|                                      base.__name__) | ||||
|  | ||||
|         # Get superclasses from last base superclass | ||||
|         document_bases = [b for b in flattened_bases if hasattr(b, "_class_name")] | ||||
|         document_bases = [b for b in flattened_bases | ||||
|                           if hasattr(b, '_class_name')] | ||||
|         if document_bases: | ||||
|             superclasses = document_bases[0]._superclasses | ||||
|             superclasses += (document_bases[0]._class_name,) | ||||
|             superclasses += (document_bases[0]._class_name, ) | ||||
|  | ||||
|         _cls = ".".join(reversed(class_name)) | ||||
|         attrs["_class_name"] = _cls | ||||
|         attrs["_superclasses"] = superclasses | ||||
|         attrs["_subclasses"] = (_cls,) | ||||
|         attrs["_types"] = attrs["_subclasses"]  # TODO depreciate _types | ||||
|         _cls = '.'.join(reversed(class_name)) | ||||
|         attrs['_class_name'] = _cls | ||||
|         attrs['_superclasses'] = superclasses | ||||
|         attrs['_subclasses'] = (_cls, ) | ||||
|         attrs['_types'] = attrs['_subclasses']  # TODO depreciate _types | ||||
|  | ||||
|         # Create the new_class | ||||
|         new_class = super_new(mcs, name, bases, attrs) | ||||
|         new_class = super_new(cls, name, bases, attrs) | ||||
|  | ||||
|         # Set _subclasses | ||||
|         for base in document_bases: | ||||
| @@ -160,12 +146,8 @@ class DocumentMetaclass(type): | ||||
|                 base._subclasses += (_cls,) | ||||
|             base._types = base._subclasses  # TODO depreciate _types | ||||
|  | ||||
|         ( | ||||
|             Document, | ||||
|             EmbeddedDocument, | ||||
|             DictField, | ||||
|             CachedReferenceField, | ||||
|         ) = mcs._import_classes() | ||||
|         (Document, EmbeddedDocument, DictField, | ||||
|          CachedReferenceField) = cls._import_classes() | ||||
|  | ||||
|         if issubclass(new_class, Document): | ||||
|             new_class._collection = None | ||||
| @@ -184,83 +166,86 @@ class DocumentMetaclass(type): | ||||
|             for val in new_class.__dict__.values(): | ||||
|                 if isinstance(val, classmethod): | ||||
|                     f = val.__get__(new_class) | ||||
|                     if hasattr(f, "__func__") and not hasattr(f, "im_func"): | ||||
|                         f.__dict__.update({"im_func": getattr(f, "__func__")}) | ||||
|                     if hasattr(f, "__self__") and not hasattr(f, "im_self"): | ||||
|                         f.__dict__.update({"im_self": getattr(f, "__self__")}) | ||||
|                     if hasattr(f, '__func__') and not hasattr(f, 'im_func'): | ||||
|                         f.__dict__.update({'im_func': getattr(f, '__func__')}) | ||||
|                     if hasattr(f, '__self__') and not hasattr(f, 'im_self'): | ||||
|                         f.__dict__.update({'im_self': getattr(f, '__self__')}) | ||||
|  | ||||
|         # Handle delete rules | ||||
|         for field in itervalues(new_class._fields): | ||||
|         for field in new_class._fields.itervalues(): | ||||
|             f = field | ||||
|             if f.owner_document is None: | ||||
|                 f.owner_document = new_class | ||||
|             delete_rule = getattr(f, "reverse_delete_rule", DO_NOTHING) | ||||
|             delete_rule = getattr(f, 'reverse_delete_rule', DO_NOTHING) | ||||
|             if isinstance(f, CachedReferenceField): | ||||
|  | ||||
|                 if issubclass(new_class, EmbeddedDocument): | ||||
|                     raise InvalidDocumentError('CachedReferenceFields is not ' | ||||
|                                                'allowed in EmbeddedDocuments') | ||||
|                 if not f.document_type: | ||||
|                     raise InvalidDocumentError( | ||||
|                         "CachedReferenceFields is not allowed in EmbeddedDocuments" | ||||
|                     ) | ||||
|                         'Document is not available to sync') | ||||
|  | ||||
|                 if f.auto_sync: | ||||
|                     f.start_listener() | ||||
|  | ||||
|                 f.document_type._cached_reference_fields.append(f) | ||||
|  | ||||
|             if isinstance(f, ComplexBaseField) and hasattr(f, "field"): | ||||
|                 delete_rule = getattr(f.field, "reverse_delete_rule", DO_NOTHING) | ||||
|             if isinstance(f, ComplexBaseField) and hasattr(f, 'field'): | ||||
|                 delete_rule = getattr(f.field, | ||||
|                                       'reverse_delete_rule', | ||||
|                                       DO_NOTHING) | ||||
|                 if isinstance(f, DictField) and delete_rule != DO_NOTHING: | ||||
|                     msg = ( | ||||
|                         "Reverse delete rules are not supported " | ||||
|                         "for %s (field: %s)" % (field.__class__.__name__, field.name) | ||||
|                     ) | ||||
|                     msg = ('Reverse delete rules are not supported ' | ||||
|                            'for %s (field: %s)' % | ||||
|                            (field.__class__.__name__, field.name)) | ||||
|                     raise InvalidDocumentError(msg) | ||||
|  | ||||
|                 f = field.field | ||||
|  | ||||
|             if delete_rule != DO_NOTHING: | ||||
|                 if issubclass(new_class, EmbeddedDocument): | ||||
|                     msg = ( | ||||
|                         "Reverse delete rules are not supported for " | ||||
|                         "EmbeddedDocuments (field: %s)" % field.name | ||||
|                     ) | ||||
|                     msg = ('Reverse delete rules are not supported for ' | ||||
|                            'EmbeddedDocuments (field: %s)' % field.name) | ||||
|                     raise InvalidDocumentError(msg) | ||||
|                 f.document_type.register_delete_rule(new_class, field.name, delete_rule) | ||||
|                 f.document_type.register_delete_rule(new_class, | ||||
|                                                      field.name, delete_rule) | ||||
|  | ||||
|             if ( | ||||
|                 field.name | ||||
|                 and hasattr(Document, field.name) | ||||
|                 and EmbeddedDocument not in new_class.mro() | ||||
|             ): | ||||
|                 msg = "%s is a document method and not a valid field name" % field.name | ||||
|             if (field.name and hasattr(Document, field.name) and | ||||
|                     EmbeddedDocument not in new_class.mro()): | ||||
|                 msg = ('%s is a document method and not a valid ' | ||||
|                        'field name' % field.name) | ||||
|                 raise InvalidDocumentError(msg) | ||||
|  | ||||
|         return new_class | ||||
|  | ||||
|     def add_to_class(self, name, value): | ||||
|         setattr(self, name, value) | ||||
|  | ||||
|     @classmethod | ||||
|     def _get_bases(mcs, bases): | ||||
|     def _get_bases(cls, bases): | ||||
|         if isinstance(bases, BasesTuple): | ||||
|             return bases | ||||
|         seen = [] | ||||
|         bases = mcs.__get_bases(bases) | ||||
|         bases = cls.__get_bases(bases) | ||||
|         unique_bases = (b for b in bases if not (b in seen or seen.append(b))) | ||||
|         return BasesTuple(unique_bases) | ||||
|  | ||||
|     @classmethod | ||||
|     def __get_bases(mcs, bases): | ||||
|     def __get_bases(cls, bases): | ||||
|         for base in bases: | ||||
|             if base is object: | ||||
|                 continue | ||||
|             yield base | ||||
|             for child_base in mcs.__get_bases(base.__bases__): | ||||
|             for child_base in cls.__get_bases(base.__bases__): | ||||
|                 yield child_base | ||||
|  | ||||
|     @classmethod | ||||
|     def _import_classes(mcs): | ||||
|         Document = _import_class("Document") | ||||
|         EmbeddedDocument = _import_class("EmbeddedDocument") | ||||
|         DictField = _import_class("DictField") | ||||
|         CachedReferenceField = _import_class("CachedReferenceField") | ||||
|     def _import_classes(cls): | ||||
|         Document = _import_class('Document') | ||||
|         EmbeddedDocument = _import_class('EmbeddedDocument') | ||||
|         DictField = _import_class('DictField') | ||||
|         CachedReferenceField = _import_class('CachedReferenceField') | ||||
|         return Document, EmbeddedDocument, DictField, CachedReferenceField | ||||
|  | ||||
|  | ||||
| @@ -269,214 +254,193 @@ class TopLevelDocumentMetaclass(DocumentMetaclass): | ||||
|     collection in the database. | ||||
|     """ | ||||
|  | ||||
|     def __new__(mcs, name, bases, attrs): | ||||
|         flattened_bases = mcs._get_bases(bases) | ||||
|         super_new = super(TopLevelDocumentMetaclass, mcs).__new__ | ||||
|     def __new__(cls, name, bases, attrs): | ||||
|         flattened_bases = cls._get_bases(bases) | ||||
|         super_new = super(TopLevelDocumentMetaclass, cls).__new__ | ||||
|  | ||||
|         # Set default _meta data if base class, otherwise get user defined meta | ||||
|         if attrs.get("my_metaclass") == TopLevelDocumentMetaclass: | ||||
|         if attrs.get('my_metaclass') == TopLevelDocumentMetaclass: | ||||
|             # defaults | ||||
|             attrs["_meta"] = { | ||||
|                 "abstract": True, | ||||
|                 "max_documents": None, | ||||
|                 "max_size": None, | ||||
|                 "ordering": [],  # default ordering applied at runtime | ||||
|                 "indexes": [],  # indexes to be ensured at runtime | ||||
|                 "id_field": None, | ||||
|                 "index_background": False, | ||||
|                 "index_drop_dups": False, | ||||
|                 "index_opts": None, | ||||
|                 "delete_rules": None, | ||||
|             attrs['_meta'] = { | ||||
|                 'abstract': True, | ||||
|                 'max_documents': None, | ||||
|                 'max_size': None, | ||||
|                 'ordering': [],  # default ordering applied at runtime | ||||
|                 'indexes': [],  # indexes to be ensured at runtime | ||||
|                 'id_field': None, | ||||
|                 'index_background': False, | ||||
|                 'index_drop_dups': False, | ||||
|                 'index_opts': None, | ||||
|                 'delete_rules': None, | ||||
|  | ||||
|                 # allow_inheritance can be True, False, and None. True means | ||||
|                 # "allow inheritance", False means "don't allow inheritance", | ||||
|                 # None means "do whatever your parent does, or don't allow | ||||
|                 # inheritance if you're a top-level class". | ||||
|                 "allow_inheritance": None, | ||||
|                 'allow_inheritance': None, | ||||
|             } | ||||
|             attrs["_is_base_cls"] = True | ||||
|             attrs["_meta"].update(attrs.get("meta", {})) | ||||
|             attrs['_is_base_cls'] = True | ||||
|             attrs['_meta'].update(attrs.get('meta', {})) | ||||
|         else: | ||||
|             attrs["_meta"] = attrs.get("meta", {}) | ||||
|             attrs['_meta'] = attrs.get('meta', {}) | ||||
|             # Explicitly set abstract to false unless set | ||||
|             attrs["_meta"]["abstract"] = attrs["_meta"].get("abstract", False) | ||||
|             attrs["_is_base_cls"] = False | ||||
|             attrs['_meta']['abstract'] = attrs['_meta'].get('abstract', False) | ||||
|             attrs['_is_base_cls'] = False | ||||
|  | ||||
|         # Set flag marking as document class - as opposed to an object mixin | ||||
|         attrs["_is_document"] = True | ||||
|         attrs['_is_document'] = True | ||||
|  | ||||
|         # Ensure queryset_class is inherited | ||||
|         if "objects" in attrs: | ||||
|             manager = attrs["objects"] | ||||
|             if hasattr(manager, "queryset_class"): | ||||
|                 attrs["_meta"]["queryset_class"] = manager.queryset_class | ||||
|         if 'objects' in attrs: | ||||
|             manager = attrs['objects'] | ||||
|             if hasattr(manager, 'queryset_class'): | ||||
|                 attrs['_meta']['queryset_class'] = manager.queryset_class | ||||
|  | ||||
|         # Clean up top level meta | ||||
|         if "meta" in attrs: | ||||
|             del attrs["meta"] | ||||
|         if 'meta' in attrs: | ||||
|             del attrs['meta'] | ||||
|  | ||||
|         # Find the parent document class | ||||
|         parent_doc_cls = [ | ||||
|             b for b in flattened_bases if b.__class__ == TopLevelDocumentMetaclass | ||||
|         ] | ||||
|         parent_doc_cls = [b for b in flattened_bases | ||||
|                           if b.__class__ == TopLevelDocumentMetaclass] | ||||
|         parent_doc_cls = None if not parent_doc_cls else parent_doc_cls[0] | ||||
|  | ||||
|         # Prevent classes setting collection different to their parents | ||||
|         # If parent wasn't an abstract class | ||||
|         if ( | ||||
|             parent_doc_cls | ||||
|             and "collection" in attrs.get("_meta", {}) | ||||
|             and not parent_doc_cls._meta.get("abstract", True) | ||||
|         ): | ||||
|             msg = "Trying to set a collection on a subclass (%s)" % name | ||||
|         if (parent_doc_cls and 'collection' in attrs.get('_meta', {}) and | ||||
|                 not parent_doc_cls._meta.get('abstract', True)): | ||||
|             msg = 'Trying to set a collection on a subclass (%s)' % name | ||||
|             warnings.warn(msg, SyntaxWarning) | ||||
|             del attrs["_meta"]["collection"] | ||||
|             del attrs['_meta']['collection'] | ||||
|  | ||||
|         # Ensure abstract documents have abstract bases | ||||
|         if attrs.get("_is_base_cls") or attrs["_meta"].get("abstract"): | ||||
|             if parent_doc_cls and not parent_doc_cls._meta.get("abstract", False): | ||||
|                 msg = "Abstract document cannot have non-abstract base" | ||||
|         if attrs.get('_is_base_cls') or attrs['_meta'].get('abstract'): | ||||
|             if (parent_doc_cls and | ||||
|                     not parent_doc_cls._meta.get('abstract', False)): | ||||
|                 msg = 'Abstract document cannot have non-abstract base' | ||||
|                 raise ValueError(msg) | ||||
|             return super_new(mcs, name, bases, attrs) | ||||
|             return super_new(cls, name, bases, attrs) | ||||
|  | ||||
|         # Merge base class metas. | ||||
|         # Uses a special MetaDict that handles various merging rules | ||||
|         meta = MetaDict() | ||||
|         for base in flattened_bases[::-1]: | ||||
|             # Add any mixin metadata from plain objects | ||||
|             if hasattr(base, "meta"): | ||||
|             if hasattr(base, 'meta'): | ||||
|                 meta.merge(base.meta) | ||||
|             elif hasattr(base, "_meta"): | ||||
|             elif hasattr(base, '_meta'): | ||||
|                 meta.merge(base._meta) | ||||
|  | ||||
|             # Set collection in the meta if its callable | ||||
|             if getattr(base, "_is_document", False) and not base._meta.get("abstract"): | ||||
|                 collection = meta.get("collection", None) | ||||
|             if (getattr(base, '_is_document', False) and | ||||
|                     not base._meta.get('abstract')): | ||||
|                 collection = meta.get('collection', None) | ||||
|                 if callable(collection): | ||||
|                     meta["collection"] = collection(base) | ||||
|                     meta['collection'] = collection(base) | ||||
|  | ||||
|         meta.merge(attrs.get("_meta", {}))  # Top level meta | ||||
|         meta.merge(attrs.get('_meta', {}))  # Top level meta | ||||
|  | ||||
|         # Only simple classes (i.e. direct subclasses of Document) may set | ||||
|         # allow_inheritance to False. If the base Document allows inheritance, | ||||
|         # none of its subclasses can override allow_inheritance to False. | ||||
|         simple_class = all( | ||||
|             [b._meta.get("abstract") for b in flattened_bases if hasattr(b, "_meta")] | ||||
|         ) | ||||
|         simple_class = all([b._meta.get('abstract') | ||||
|                             for b in flattened_bases if hasattr(b, '_meta')]) | ||||
|         if ( | ||||
|             not simple_class | ||||
|             and meta["allow_inheritance"] is False | ||||
|             and not meta["abstract"] | ||||
|             not simple_class and | ||||
|             meta['allow_inheritance'] is False and | ||||
|             not meta['abstract'] | ||||
|         ): | ||||
|             raise ValueError( | ||||
|                 "Only direct subclasses of Document may set " | ||||
|                 '"allow_inheritance" to False' | ||||
|             ) | ||||
|             raise ValueError('Only direct subclasses of Document may set ' | ||||
|                              '"allow_inheritance" to False') | ||||
|  | ||||
|         # Set default collection name | ||||
|         if "collection" not in meta: | ||||
|             meta["collection"] = ( | ||||
|                 "".join("_%s" % c if c.isupper() else c for c in name) | ||||
|                 .strip("_") | ||||
|                 .lower() | ||||
|             ) | ||||
|         attrs["_meta"] = meta | ||||
|         if 'collection' not in meta: | ||||
|             meta['collection'] = ''.join('_%s' % c if c.isupper() else c | ||||
|                                          for c in name).strip('_').lower() | ||||
|         attrs['_meta'] = meta | ||||
|  | ||||
|         # Call super and get the new class | ||||
|         new_class = super_new(mcs, name, bases, attrs) | ||||
|         new_class = super_new(cls, name, bases, attrs) | ||||
|  | ||||
|         meta = new_class._meta | ||||
|  | ||||
|         # Set index specifications | ||||
|         meta["index_specs"] = new_class._build_index_specs(meta["indexes"]) | ||||
|         meta['index_specs'] = new_class._build_index_specs(meta['indexes']) | ||||
|  | ||||
|         # If collection is a callable - call it and set the value | ||||
|         collection = meta.get("collection") | ||||
|         collection = meta.get('collection') | ||||
|         if callable(collection): | ||||
|             new_class._meta["collection"] = collection(new_class) | ||||
|             new_class._meta['collection'] = collection(new_class) | ||||
|  | ||||
|         # Provide a default queryset unless exists or one has been set | ||||
|         if "objects" not in dir(new_class): | ||||
|         if 'objects' not in dir(new_class): | ||||
|             new_class.objects = QuerySetManager() | ||||
|  | ||||
|         # Validate the fields and set primary key if needed | ||||
|         for field_name, field in iteritems(new_class._fields): | ||||
|         for field_name, field in new_class._fields.iteritems(): | ||||
|             if field.primary_key: | ||||
|                 # Ensure only one primary key is set | ||||
|                 current_pk = new_class._meta.get("id_field") | ||||
|                 current_pk = new_class._meta.get('id_field') | ||||
|                 if current_pk and current_pk != field_name: | ||||
|                     raise ValueError("Cannot override primary key field") | ||||
|                     raise ValueError('Cannot override primary key field') | ||||
|  | ||||
|                 # Set primary key | ||||
|                 if not current_pk: | ||||
|                     new_class._meta["id_field"] = field_name | ||||
|                     new_class._meta['id_field'] = field_name | ||||
|                     new_class.id = field | ||||
|  | ||||
|         # If the document doesn't explicitly define a primary key field, create | ||||
|         # one. Make it an ObjectIdField and give it a non-clashing name ("id" | ||||
|         # by default, but can be different if that one's taken). | ||||
|         if not new_class._meta.get("id_field"): | ||||
|             id_name, id_db_name = mcs.get_auto_id_names(new_class) | ||||
|             new_class._meta["id_field"] = id_name | ||||
|         # Set primary key if not defined by the document | ||||
|         new_class._auto_id_field = getattr(parent_doc_cls, | ||||
|                                            '_auto_id_field', False) | ||||
|         if not new_class._meta.get('id_field'): | ||||
|             # After 0.10, find not existing names, instead of overwriting | ||||
|             id_name, id_db_name = cls.get_auto_id_names(new_class) | ||||
|             new_class._auto_id_field = True | ||||
|             new_class._meta['id_field'] = id_name | ||||
|             new_class._fields[id_name] = ObjectIdField(db_field=id_db_name) | ||||
|             new_class._fields[id_name].name = id_name | ||||
|             new_class.id = new_class._fields[id_name] | ||||
|             new_class._db_field_map[id_name] = id_db_name | ||||
|             new_class._reverse_db_field_map[id_db_name] = id_name | ||||
|             # Prepend id field to _fields_ordered | ||||
|             new_class._fields_ordered = (id_name, ) + new_class._fields_ordered | ||||
|  | ||||
|             # Prepend the ID field to _fields_ordered (so that it's *always* | ||||
|             # the first field). | ||||
|             new_class._fields_ordered = (id_name,) + new_class._fields_ordered | ||||
|  | ||||
|         # Merge in exceptions with parent hierarchy. | ||||
|         # Merge in exceptions with parent hierarchy | ||||
|         exceptions_to_merge = (DoesNotExist, MultipleObjectsReturned) | ||||
|         module = attrs.get("__module__") | ||||
|         module = attrs.get('__module__') | ||||
|         for exc in exceptions_to_merge: | ||||
|             name = exc.__name__ | ||||
|             parents = tuple( | ||||
|                 getattr(base, name) for base in flattened_bases if hasattr(base, name) | ||||
|             ) or (exc,) | ||||
|  | ||||
|             # Create a new exception and set it as an attribute on the new | ||||
|             # class. | ||||
|             exception = type(name, parents, {"__module__": module}) | ||||
|             parents = tuple(getattr(base, name) for base in flattened_bases | ||||
|                             if hasattr(base, name)) or (exc,) | ||||
|             # Create new exception and set to new_class | ||||
|             exception = type(name, parents, {'__module__': module}) | ||||
|             setattr(new_class, name, exception) | ||||
|  | ||||
|         return new_class | ||||
|  | ||||
|     @classmethod | ||||
|     def get_auto_id_names(mcs, new_class): | ||||
|         """Find a name for the automatic ID field for the given new class. | ||||
|  | ||||
|         Return a two-element tuple where the first item is the field name (i.e. | ||||
|         the attribute name on the object) and the second element is the DB | ||||
|         field name (i.e. the name of the key stored in MongoDB). | ||||
|  | ||||
|         Defaults to ('id', '_id'), or generates a non-clashing name in the form | ||||
|         of ('auto_id_X', '_auto_id_X') if the default name is already taken. | ||||
|         """ | ||||
|         id_name, id_db_name = ("id", "_id") | ||||
|         existing_fields = {field_name for field_name in new_class._fields} | ||||
|         existing_db_fields = {v.db_field for v in new_class._fields.values()} | ||||
|         if id_name not in existing_fields and id_db_name not in existing_db_fields: | ||||
|     def get_auto_id_names(cls, new_class): | ||||
|         id_name, id_db_name = ('id', '_id') | ||||
|         if id_name not in new_class._fields and \ | ||||
|                 id_db_name not in (v.db_field for v in new_class._fields.values()): | ||||
|             return id_name, id_db_name | ||||
|  | ||||
|         id_basename, id_db_basename, i = ("auto_id", "_auto_id", 0) | ||||
|         for i in itertools.count(): | ||||
|             id_name = "{0}_{1}".format(id_basename, i) | ||||
|             id_db_name = "{0}_{1}".format(id_db_basename, i) | ||||
|             if id_name not in existing_fields and id_db_name not in existing_db_fields: | ||||
|                 return id_name, id_db_name | ||||
|         id_basename, id_db_basename, i = 'auto_id', '_auto_id', 0 | ||||
|         while id_name in new_class._fields or \ | ||||
|                 id_db_name in (v.db_field for v in new_class._fields.values()): | ||||
|             id_name = '{0}_{1}'.format(id_basename, i) | ||||
|             id_db_name = '{0}_{1}'.format(id_db_basename, i) | ||||
|             i += 1 | ||||
|         return id_name, id_db_name | ||||
|  | ||||
|  | ||||
| class MetaDict(dict): | ||||
|     """Custom dictionary for meta classes. | ||||
|     Handles the merging of set indexes | ||||
|     """ | ||||
|  | ||||
|     _merge_options = ("indexes",) | ||||
|     _merge_options = ('indexes',) | ||||
|  | ||||
|     def merge(self, new_options): | ||||
|         for k, v in iteritems(new_options): | ||||
|         for k, v in new_options.iteritems(): | ||||
|             if k in self._merge_options: | ||||
|                 self[k] = self.get(k, []) + v | ||||
|             else: | ||||
| @@ -485,5 +449,4 @@ class MetaDict(dict): | ||||
|  | ||||
| class BasesTuple(tuple): | ||||
|     """Special class to handle introspection of bases tuple in __new__""" | ||||
|  | ||||
|     pass | ||||
|   | ||||
| @@ -1,22 +0,0 @@ | ||||
| import re | ||||
|  | ||||
|  | ||||
| class LazyRegexCompiler(object): | ||||
|     """Descriptor to allow lazy compilation of regex""" | ||||
|  | ||||
|     def __init__(self, pattern, flags=0): | ||||
|         self._pattern = pattern | ||||
|         self._flags = flags | ||||
|         self._compiled_regex = None | ||||
|  | ||||
|     @property | ||||
|     def compiled_regex(self): | ||||
|         if self._compiled_regex is None: | ||||
|             self._compiled_regex = re.compile(self._pattern, self._flags) | ||||
|         return self._compiled_regex | ||||
|  | ||||
|     def __get__(self, instance, owner): | ||||
|         return self.compiled_regex | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|         raise AttributeError("Can not set attribute LazyRegexCompiler") | ||||
| @@ -19,44 +19,38 @@ def _import_class(cls_name): | ||||
|     if cls_name in _class_registry_cache: | ||||
|         return _class_registry_cache.get(cls_name) | ||||
|  | ||||
|     doc_classes = ( | ||||
|         "Document", | ||||
|         "DynamicEmbeddedDocument", | ||||
|         "EmbeddedDocument", | ||||
|         "MapReduceDocument", | ||||
|     ) | ||||
|     doc_classes = ('Document', 'DynamicEmbeddedDocument', 'EmbeddedDocument', | ||||
|                    'MapReduceDocument') | ||||
|  | ||||
|     # Field Classes | ||||
|     if not _field_list_cache: | ||||
|         from mongoengine.fields import __all__ as fields | ||||
|  | ||||
|         _field_list_cache.extend(fields) | ||||
|         from mongoengine.base.fields import __all__ as fields | ||||
|  | ||||
|         _field_list_cache.extend(fields) | ||||
|  | ||||
|     field_classes = _field_list_cache | ||||
|  | ||||
|     deref_classes = ("DeReference",) | ||||
|     queryset_classes = ('OperationError',) | ||||
|     deref_classes = ('DeReference',) | ||||
|  | ||||
|     if cls_name == "BaseDocument": | ||||
|     if cls_name == 'BaseDocument': | ||||
|         from mongoengine.base import document as module | ||||
|  | ||||
|         import_classes = ["BaseDocument"] | ||||
|         import_classes = ['BaseDocument'] | ||||
|     elif cls_name in doc_classes: | ||||
|         from mongoengine import document as module | ||||
|  | ||||
|         import_classes = doc_classes | ||||
|     elif cls_name in field_classes: | ||||
|         from mongoengine import fields as module | ||||
|  | ||||
|         import_classes = field_classes | ||||
|     elif cls_name in queryset_classes: | ||||
|         from mongoengine import queryset as module | ||||
|         import_classes = queryset_classes | ||||
|     elif cls_name in deref_classes: | ||||
|         from mongoengine import dereference as module | ||||
|  | ||||
|         import_classes = deref_classes | ||||
|     else: | ||||
|         raise ValueError("No import set for: %s" % cls_name) | ||||
|         raise ValueError('No import set for: ' % cls_name) | ||||
|  | ||||
|     for cls in import_classes: | ||||
|         _class_registry_cache[cls] = getattr(module, cls) | ||||
|   | ||||
| @@ -1,96 +1,74 @@ | ||||
| from pymongo import MongoClient, ReadPreference, uri_parser | ||||
| from pymongo.database import _check_name | ||||
| import six | ||||
|  | ||||
| __all__ = [ | ||||
|     "DEFAULT_CONNECTION_NAME", | ||||
|     "DEFAULT_DATABASE_NAME", | ||||
|     "ConnectionFailure", | ||||
|     "connect", | ||||
|     "disconnect", | ||||
|     "disconnect_all", | ||||
|     "get_connection", | ||||
|     "get_db", | ||||
|     "register_connection", | ||||
| ] | ||||
| from mongoengine.python_support import IS_PYMONGO_3 | ||||
|  | ||||
| __all__ = ['MongoEngineConnectionError', 'connect', 'register_connection', | ||||
|            'DEFAULT_CONNECTION_NAME'] | ||||
|  | ||||
|  | ||||
| DEFAULT_CONNECTION_NAME = "default" | ||||
| DEFAULT_DATABASE_NAME = "test" | ||||
| DEFAULT_HOST = "localhost" | ||||
| DEFAULT_PORT = 27017 | ||||
| DEFAULT_CONNECTION_NAME = 'default' | ||||
|  | ||||
| if IS_PYMONGO_3: | ||||
|     READ_PREFERENCE = ReadPreference.PRIMARY | ||||
| else: | ||||
|     from pymongo import MongoReplicaSetClient | ||||
|     READ_PREFERENCE = False | ||||
|  | ||||
|  | ||||
| class MongoEngineConnectionError(Exception): | ||||
|     """Error raised when the database connection can't be established or | ||||
|     when a connection with a requested alias can't be retrieved. | ||||
|     """ | ||||
|     pass | ||||
|  | ||||
|  | ||||
| _connection_settings = {} | ||||
| _connections = {} | ||||
| _dbs = {} | ||||
|  | ||||
| READ_PREFERENCE = ReadPreference.PRIMARY | ||||
|  | ||||
| def register_connection(alias, name=None, host=None, port=None, | ||||
|                         read_preference=READ_PREFERENCE, | ||||
|                         username=None, password=None, | ||||
|                         authentication_source=None, | ||||
|                         authentication_mechanism=None, | ||||
|                         **kwargs): | ||||
|     """Add a connection. | ||||
|  | ||||
| class ConnectionFailure(Exception): | ||||
|     """Error raised when the database connection can't be established or | ||||
|     when a connection with a requested alias can't be retrieved. | ||||
|     """ | ||||
|  | ||||
|     pass | ||||
|  | ||||
|  | ||||
| def _check_db_name(name): | ||||
|     """Check if a database name is valid. | ||||
|     This functionality is copied from pymongo Database class constructor. | ||||
|     """ | ||||
|     if not isinstance(name, six.string_types): | ||||
|         raise TypeError("name must be an instance of %s" % six.string_types) | ||||
|     elif name != "$external": | ||||
|         _check_name(name) | ||||
|  | ||||
|  | ||||
| def _get_connection_settings( | ||||
|     db=None, | ||||
|     name=None, | ||||
|     host=None, | ||||
|     port=None, | ||||
|     read_preference=READ_PREFERENCE, | ||||
|     username=None, | ||||
|     password=None, | ||||
|     authentication_source=None, | ||||
|     authentication_mechanism=None, | ||||
|     **kwargs | ||||
| ): | ||||
|     """Get the connection settings as a dict | ||||
|  | ||||
|     : param db: the name of the database to use, for compatibility with connect | ||||
|     : param name: the name of the specific database to use | ||||
|     : param host: the host name of the: program: `mongod` instance to connect to | ||||
|     : param port: the port that the: program: `mongod` instance is running on | ||||
|     : param read_preference: The read preference for the collection | ||||
|     : param username: username to authenticate with | ||||
|     : param password: password to authenticate with | ||||
|     : param authentication_source: database to authenticate against | ||||
|     : param authentication_mechanism: database authentication mechanisms. | ||||
|     :param alias: the name that will be used to refer to this connection | ||||
|         throughout MongoEngine | ||||
|     :param name: the name of the specific database to use | ||||
|     :param host: the host name of the :program:`mongod` instance to connect to | ||||
|     :param port: the port that the :program:`mongod` instance is running on | ||||
|     :param read_preference: The read preference for the collection | ||||
|        ** Added pymongo 2.1 | ||||
|     :param username: username to authenticate with | ||||
|     :param password: password to authenticate with | ||||
|     :param authentication_source: database to authenticate against | ||||
|     :param authentication_mechanism: database authentication mechanisms. | ||||
|         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, | ||||
|         MONGODB-CR (MongoDB Challenge Response protocol) for older servers. | ||||
|     : param is_mock: explicitly use mongomock for this connection | ||||
|         (can also be done by using `mongomock: // ` as db host prefix) | ||||
|     : param kwargs: ad-hoc parameters to be passed into the pymongo driver, | ||||
|     :param is_mock: explicitly use mongomock for this connection | ||||
|         (can also be done by using `mongomock://` as db host prefix) | ||||
|     :param kwargs: ad-hoc parameters to be passed into the pymongo driver, | ||||
|         for example maxpoolsize, tz_aware, etc. See the documentation | ||||
|         for pymongo's `MongoClient` for a full list. | ||||
|  | ||||
|     .. versionchanged:: 0.10.6 - added mongomock support | ||||
|     """ | ||||
|     conn_settings = { | ||||
|         "name": name or db or DEFAULT_DATABASE_NAME, | ||||
|         "host": host or DEFAULT_HOST, | ||||
|         "port": port or DEFAULT_PORT, | ||||
|         "read_preference": read_preference, | ||||
|         "username": username, | ||||
|         "password": password, | ||||
|         "authentication_source": authentication_source, | ||||
|         "authentication_mechanism": authentication_mechanism, | ||||
|         'name': name or 'test', | ||||
|         'host': host or 'localhost', | ||||
|         'port': port or 27017, | ||||
|         'read_preference': read_preference, | ||||
|         'username': username, | ||||
|         'password': password, | ||||
|         'authentication_source': authentication_source, | ||||
|         'authentication_mechanism': authentication_mechanism | ||||
|     } | ||||
|  | ||||
|     _check_db_name(conn_settings["name"]) | ||||
|     conn_host = conn_settings["host"] | ||||
|     conn_host = conn_settings['host'] | ||||
|  | ||||
|     # Host can be a list or a string, so if string, force to a list. | ||||
|     if isinstance(conn_host, six.string_types): | ||||
| @@ -100,152 +78,51 @@ def _get_connection_settings( | ||||
|     for entity in conn_host: | ||||
|  | ||||
|         # Handle Mongomock | ||||
|         if entity.startswith("mongomock://"): | ||||
|             conn_settings["is_mock"] = True | ||||
|         if entity.startswith('mongomock://'): | ||||
|             conn_settings['is_mock'] = True | ||||
|             # `mongomock://` is not a valid url prefix and must be replaced by `mongodb://` | ||||
|             new_entity = entity.replace("mongomock://", "mongodb://", 1) | ||||
|             resolved_hosts.append(new_entity) | ||||
|  | ||||
|             uri_dict = uri_parser.parse_uri(new_entity) | ||||
|  | ||||
|             database = uri_dict.get("database") | ||||
|             if database: | ||||
|                 conn_settings["name"] = database | ||||
|             resolved_hosts.append(entity.replace('mongomock://', 'mongodb://', 1)) | ||||
|  | ||||
|         # Handle URI style connections, only updating connection params which | ||||
|         # were explicitly specified in the URI. | ||||
|         elif "://" in entity: | ||||
|         elif '://' in entity: | ||||
|             uri_dict = uri_parser.parse_uri(entity) | ||||
|             resolved_hosts.append(entity) | ||||
|  | ||||
|             database = uri_dict.get("database") | ||||
|             if database: | ||||
|                 conn_settings["name"] = database | ||||
|             if uri_dict.get('database'): | ||||
|                 conn_settings['name'] = uri_dict.get('database') | ||||
|  | ||||
|             for param in ("read_preference", "username", "password"): | ||||
|             for param in ('read_preference', 'username', 'password'): | ||||
|                 if uri_dict.get(param): | ||||
|                     conn_settings[param] = uri_dict[param] | ||||
|  | ||||
|             uri_options = uri_dict["options"] | ||||
|             if "replicaset" in uri_options: | ||||
|                 conn_settings["replicaSet"] = uri_options["replicaset"] | ||||
|             if "authsource" in uri_options: | ||||
|                 conn_settings["authentication_source"] = uri_options["authsource"] | ||||
|             if "authmechanism" in uri_options: | ||||
|                 conn_settings["authentication_mechanism"] = uri_options["authmechanism"] | ||||
|             if "readpreference" in uri_options: | ||||
|                 read_preferences = ( | ||||
|                     ReadPreference.NEAREST, | ||||
|                     ReadPreference.PRIMARY, | ||||
|                     ReadPreference.PRIMARY_PREFERRED, | ||||
|                     ReadPreference.SECONDARY, | ||||
|                     ReadPreference.SECONDARY_PREFERRED, | ||||
|                 ) | ||||
|  | ||||
|                 # Starting with PyMongo v3.5, the "readpreference" option is | ||||
|                 # returned as a string (e.g. "secondaryPreferred") and not an | ||||
|                 # int (e.g. 3). | ||||
|                 # TODO simplify the code below once we drop support for | ||||
|                 # PyMongo v3.4. | ||||
|                 read_pf_mode = uri_options["readpreference"] | ||||
|                 if isinstance(read_pf_mode, six.string_types): | ||||
|                     read_pf_mode = read_pf_mode.lower() | ||||
|                 for preference in read_preferences: | ||||
|                     if ( | ||||
|                         preference.name.lower() == read_pf_mode | ||||
|                         or preference.mode == read_pf_mode | ||||
|                     ): | ||||
|                         conn_settings["read_preference"] = preference | ||||
|                         break | ||||
|             uri_options = uri_dict['options'] | ||||
|             if 'replicaset' in uri_options: | ||||
|                 conn_settings['replicaSet'] = uri_options['replicaset'] | ||||
|             if 'authsource' in uri_options: | ||||
|                 conn_settings['authentication_source'] = uri_options['authsource'] | ||||
|             if 'authmechanism' in uri_options: | ||||
|                 conn_settings['authentication_mechanism'] = uri_options['authmechanism'] | ||||
|         else: | ||||
|             resolved_hosts.append(entity) | ||||
|     conn_settings["host"] = resolved_hosts | ||||
|     conn_settings['host'] = resolved_hosts | ||||
|  | ||||
|     # Deprecated parameters that should not be passed on | ||||
|     kwargs.pop("slaves", None) | ||||
|     kwargs.pop("is_slave", None) | ||||
|     kwargs.pop('slaves', None) | ||||
|     kwargs.pop('is_slave', None) | ||||
|  | ||||
|     conn_settings.update(kwargs) | ||||
|     return conn_settings | ||||
|  | ||||
|  | ||||
| def register_connection( | ||||
|     alias, | ||||
|     db=None, | ||||
|     name=None, | ||||
|     host=None, | ||||
|     port=None, | ||||
|     read_preference=READ_PREFERENCE, | ||||
|     username=None, | ||||
|     password=None, | ||||
|     authentication_source=None, | ||||
|     authentication_mechanism=None, | ||||
|     **kwargs | ||||
| ): | ||||
|     """Register the connection settings. | ||||
|  | ||||
|     : param alias: the name that will be used to refer to this connection | ||||
|         throughout MongoEngine | ||||
|     : param db: the name of the database to use, for compatibility with connect | ||||
|     : param name: the name of the specific database to use | ||||
|     : param host: the host name of the: program: `mongod` instance to connect to | ||||
|     : param port: the port that the: program: `mongod` instance is running on | ||||
|     : param read_preference: The read preference for the collection | ||||
|     : param username: username to authenticate with | ||||
|     : param password: password to authenticate with | ||||
|     : param authentication_source: database to authenticate against | ||||
|     : param authentication_mechanism: database authentication mechanisms. | ||||
|         By default, use SCRAM-SHA-1 with MongoDB 3.0 and later, | ||||
|         MONGODB-CR (MongoDB Challenge Response protocol) for older servers. | ||||
|     : param is_mock: explicitly use mongomock for this connection | ||||
|         (can also be done by using `mongomock: // ` as db host prefix) | ||||
|     : param kwargs: ad-hoc parameters to be passed into the pymongo driver, | ||||
|         for example maxpoolsize, tz_aware, etc. See the documentation | ||||
|         for pymongo's `MongoClient` for a full list. | ||||
|  | ||||
|     .. versionchanged:: 0.10.6 - added mongomock support | ||||
|     """ | ||||
|     conn_settings = _get_connection_settings( | ||||
|         db=db, | ||||
|         name=name, | ||||
|         host=host, | ||||
|         port=port, | ||||
|         read_preference=read_preference, | ||||
|         username=username, | ||||
|         password=password, | ||||
|         authentication_source=authentication_source, | ||||
|         authentication_mechanism=authentication_mechanism, | ||||
|         **kwargs | ||||
|     ) | ||||
|     _connection_settings[alias] = conn_settings | ||||
|  | ||||
|  | ||||
| def disconnect(alias=DEFAULT_CONNECTION_NAME): | ||||
|     """Close the connection with a given alias.""" | ||||
|     from mongoengine.base.common import _get_documents_by_db | ||||
|     from mongoengine import Document | ||||
|  | ||||
|     if alias in _connections: | ||||
|         get_connection(alias=alias).close() | ||||
|         del _connections[alias] | ||||
|  | ||||
|     if alias in _dbs: | ||||
|         # Detach all cached collections in Documents | ||||
|         for doc_cls in _get_documents_by_db(alias, DEFAULT_CONNECTION_NAME): | ||||
|             if issubclass(doc_cls, Document):  # Skip EmbeddedDocument | ||||
|                 doc_cls._disconnect() | ||||
|  | ||||
|         del _dbs[alias] | ||||
|  | ||||
|     if alias in _connection_settings: | ||||
|         del _connection_settings[alias] | ||||
|  | ||||
|  | ||||
| def disconnect_all(): | ||||
|     """Close all registered database.""" | ||||
|     for alias in list(_connections.keys()): | ||||
|         disconnect(alias) | ||||
|  | ||||
|  | ||||
| def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||
|     """Return a connection with a given alias.""" | ||||
| @@ -260,93 +137,84 @@ def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||
|         return _connections[alias] | ||||
|  | ||||
|     # Validate that the requested alias exists in the _connection_settings. | ||||
|     # Raise ConnectionFailure if it doesn't. | ||||
|     # Raise MongoEngineConnectionError if it doesn't. | ||||
|     if alias not in _connection_settings: | ||||
|         if alias == DEFAULT_CONNECTION_NAME: | ||||
|             msg = "You have not defined a default connection" | ||||
|             msg = 'You have not defined a default connection' | ||||
|         else: | ||||
|             msg = 'Connection with alias "%s" has not been defined' % alias | ||||
|         raise ConnectionFailure(msg) | ||||
|         raise MongoEngineConnectionError(msg) | ||||
|  | ||||
|     def _clean_settings(settings_dict): | ||||
|         irrelevant_fields_set = { | ||||
|             "name", | ||||
|             "username", | ||||
|             "password", | ||||
|             "authentication_source", | ||||
|             "authentication_mechanism", | ||||
|         } | ||||
|         irrelevant_fields = set([ | ||||
|             'name', 'username', 'password', 'authentication_source', | ||||
|             'authentication_mechanism' | ||||
|         ]) | ||||
|         return { | ||||
|             k: v for k, v in settings_dict.items() if k not in irrelevant_fields_set | ||||
|             k: v for k, v in settings_dict.items() | ||||
|             if k not in irrelevant_fields | ||||
|         } | ||||
|  | ||||
|     raw_conn_settings = _connection_settings[alias].copy() | ||||
|  | ||||
|     # Retrieve a copy of the connection settings associated with the requested | ||||
|     # alias and remove the database name and authentication info (we don't | ||||
|     # care about them at this point). | ||||
|     conn_settings = _clean_settings(raw_conn_settings) | ||||
|     conn_settings = _clean_settings(_connection_settings[alias].copy()) | ||||
|  | ||||
|     # Determine if we should use PyMongo's or mongomock's MongoClient. | ||||
|     is_mock = conn_settings.pop("is_mock", False) | ||||
|     is_mock = conn_settings.pop('is_mock', False) | ||||
|     if is_mock: | ||||
|         try: | ||||
|             import mongomock | ||||
|         except ImportError: | ||||
|             raise RuntimeError("You need mongomock installed to mock MongoEngine.") | ||||
|             raise RuntimeError('You need mongomock installed to mock ' | ||||
|                                'MongoEngine.') | ||||
|         connection_class = mongomock.MongoClient | ||||
|     else: | ||||
|         connection_class = MongoClient | ||||
|  | ||||
|     # Re-use existing connection if one is suitable. | ||||
|     existing_connection = _find_existing_connection(raw_conn_settings) | ||||
|     if existing_connection: | ||||
|         connection = existing_connection | ||||
|     else: | ||||
|         connection = _create_connection( | ||||
|             alias=alias, connection_class=connection_class, **conn_settings | ||||
|         ) | ||||
|     _connections[alias] = connection | ||||
|     return _connections[alias] | ||||
|         # For replica set connections with PyMongo 2.x, use | ||||
|         # MongoReplicaSetClient. | ||||
|         # TODO remove this once we stop supporting PyMongo 2.x. | ||||
|         if 'replicaSet' in conn_settings and not IS_PYMONGO_3: | ||||
|             connection_class = MongoReplicaSetClient | ||||
|             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) | ||||
|  | ||||
|             # hosts_or_uri has to be a string, so if 'host' was provided | ||||
|             # as a list, join its parts and separate them by ',' | ||||
|             if isinstance(conn_settings['hosts_or_uri'], list): | ||||
|                 conn_settings['hosts_or_uri'] = ','.join( | ||||
|                     conn_settings['hosts_or_uri']) | ||||
|  | ||||
| def _create_connection(alias, connection_class, **connection_settings): | ||||
|     """ | ||||
|     Create the new connection for this alias. Raise | ||||
|     ConnectionFailure if it can't be established. | ||||
|     """ | ||||
|     try: | ||||
|         return connection_class(**connection_settings) | ||||
|     except Exception as e: | ||||
|         raise ConnectionFailure("Cannot connect to database %s :\n%s" % (alias, e)) | ||||
|             # Discard port since it can't be used on MongoReplicaSetClient | ||||
|             conn_settings.pop('port', None) | ||||
|  | ||||
|  | ||||
| def _find_existing_connection(connection_settings): | ||||
|     """ | ||||
|     Check if an existing connection could be reused | ||||
|  | ||||
|     Iterate over all of the connection settings and if an existing connection | ||||
|     with the same parameters is suitable, return it | ||||
|  | ||||
|     :param connection_settings: the settings of the new connection | ||||
|     :return: An existing connection or None | ||||
|     """ | ||||
|     connection_settings_bis = ( | ||||
|     # Iterate over all of the connection settings and if a connection with | ||||
|     # the same parameters is already established, use it instead of creating | ||||
|     # a new one. | ||||
|     existing_connection = None | ||||
|     connection_settings_iterator = ( | ||||
|         (db_alias, settings.copy()) | ||||
|         for db_alias, settings in _connection_settings.items() | ||||
|     ) | ||||
|     for db_alias, connection_settings in connection_settings_iterator: | ||||
|         connection_settings = _clean_settings(connection_settings) | ||||
|         if conn_settings == connection_settings and _connections.get(db_alias): | ||||
|             existing_connection = _connections[db_alias] | ||||
|             break | ||||
|  | ||||
|     def _clean_settings(settings_dict): | ||||
|         # Only remove the name but it's important to | ||||
|         # keep the username/password/authentication_source/authentication_mechanism | ||||
|         # to identify if the connection could be shared (cfr https://github.com/MongoEngine/mongoengine/issues/2047) | ||||
|         return {k: v for k, v in settings_dict.items() if k != "name"} | ||||
|     # If an existing connection was found, assign it to the new alias | ||||
|     if existing_connection: | ||||
|         _connections[alias] = existing_connection | ||||
|     else: | ||||
|         # Otherwise, create the new connection for this alias. Raise | ||||
|         # MongoEngineConnectionError if it can't be established. | ||||
|         try: | ||||
|             _connections[alias] = connection_class(**conn_settings) | ||||
|         except Exception as e: | ||||
|             raise MongoEngineConnectionError( | ||||
|                 'Cannot connect to database %s :\n%s' % (alias, e)) | ||||
|  | ||||
|     cleaned_conn_settings = _clean_settings(connection_settings) | ||||
|     for db_alias, connection_settings in connection_settings_bis: | ||||
|         db_conn_settings = _clean_settings(connection_settings) | ||||
|         if cleaned_conn_settings == db_conn_settings and _connections.get(db_alias): | ||||
|             return _connections[db_alias] | ||||
|     return _connections[alias] | ||||
|  | ||||
|  | ||||
| def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||
| @@ -356,18 +224,14 @@ def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||
|     if alias not in _dbs: | ||||
|         conn = get_connection(alias) | ||||
|         conn_settings = _connection_settings[alias] | ||||
|         db = conn[conn_settings["name"]] | ||||
|         auth_kwargs = {"source": conn_settings["authentication_source"]} | ||||
|         if conn_settings["authentication_mechanism"] is not None: | ||||
|             auth_kwargs["mechanism"] = conn_settings["authentication_mechanism"] | ||||
|         db = conn[conn_settings['name']] | ||||
|         auth_kwargs = {'source': conn_settings['authentication_source']} | ||||
|         if conn_settings['authentication_mechanism'] is not None: | ||||
|             auth_kwargs['mechanism'] = conn_settings['authentication_mechanism'] | ||||
|         # Authenticate if necessary | ||||
|         if conn_settings["username"] and ( | ||||
|             conn_settings["password"] | ||||
|             or conn_settings["authentication_mechanism"] == "MONGODB-X509" | ||||
|         ): | ||||
|             db.authenticate( | ||||
|                 conn_settings["username"], conn_settings["password"], **auth_kwargs | ||||
|             ) | ||||
|         if conn_settings['username'] and (conn_settings['password'] or | ||||
|                                           conn_settings['authentication_mechanism'] == 'MONGODB-X509'): | ||||
|             db.authenticate(conn_settings['username'], conn_settings['password'], **auth_kwargs) | ||||
|         _dbs[alias] = db | ||||
|     return _dbs[alias] | ||||
|  | ||||
| @@ -380,27 +244,14 @@ def connect(db=None, alias=DEFAULT_CONNECTION_NAME, **kwargs): | ||||
|     provide username and password arguments as well. | ||||
|  | ||||
|     Multiple databases are supported by using aliases. Provide a separate | ||||
|     `alias` to connect to a different instance of: program: `mongod`. | ||||
|  | ||||
|     In order to replace a connection identified by a given alias, you'll | ||||
|     need to call ``disconnect`` first | ||||
|     `alias` to connect to a different instance of :program:`mongod`. | ||||
|  | ||||
|     See the docstring for `register_connection` for more details about all | ||||
|     supported kwargs. | ||||
|  | ||||
|     .. versionchanged:: 0.6 - added multiple database support. | ||||
|     """ | ||||
|     if alias in _connections: | ||||
|         prev_conn_setting = _connection_settings[alias] | ||||
|         new_conn_settings = _get_connection_settings(db, **kwargs) | ||||
|  | ||||
|         if new_conn_settings != prev_conn_setting: | ||||
|             err_msg = ( | ||||
|                 u"A different connection with alias `{}` was already " | ||||
|                 u"registered. Use disconnect() first" | ||||
|             ).format(alias) | ||||
|             raise ConnectionFailure(err_msg) | ||||
|     else: | ||||
|     if alias not in _connections: | ||||
|         register_connection(alias, db, **kwargs) | ||||
|  | ||||
|     return get_connection(alias) | ||||
|   | ||||
| @@ -1,20 +1,9 @@ | ||||
| from contextlib import contextmanager | ||||
|  | ||||
| from pymongo.write_concern import WriteConcern | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME, get_db | ||||
| from mongoengine.pymongo_support import count_documents | ||||
|  | ||||
| __all__ = ( | ||||
|     "switch_db", | ||||
|     "switch_collection", | ||||
|     "no_dereference", | ||||
|     "no_sub_classes", | ||||
|     "query_counter", | ||||
|     "set_write_concern", | ||||
| ) | ||||
|  | ||||
| __all__ = ('switch_db', 'switch_collection', 'no_dereference', | ||||
|            'no_sub_classes', 'query_counter') | ||||
|  | ||||
|  | ||||
| class switch_db(object): | ||||
| @@ -44,17 +33,17 @@ class switch_db(object): | ||||
|         self.cls = cls | ||||
|         self.collection = cls._get_collection() | ||||
|         self.db_alias = db_alias | ||||
|         self.ori_db_alias = cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME) | ||||
|         self.ori_db_alias = cls._meta.get('db_alias', DEFAULT_CONNECTION_NAME) | ||||
|  | ||||
|     def __enter__(self): | ||||
|         """Change the db_alias and clear the cached collection.""" | ||||
|         self.cls._meta["db_alias"] = self.db_alias | ||||
|         self.cls._meta['db_alias'] = self.db_alias | ||||
|         self.cls._collection = None | ||||
|         return self.cls | ||||
|  | ||||
|     def __exit__(self, t, value, traceback): | ||||
|         """Reset the db_alias and collection.""" | ||||
|         self.cls._meta["db_alias"] = self.ori_db_alias | ||||
|         self.cls._meta['db_alias'] = self.ori_db_alias | ||||
|         self.cls._collection = self.collection | ||||
|  | ||||
|  | ||||
| @@ -117,15 +106,14 @@ class no_dereference(object): | ||||
|         """ | ||||
|         self.cls = cls | ||||
|  | ||||
|         ReferenceField = _import_class("ReferenceField") | ||||
|         GenericReferenceField = _import_class("GenericReferenceField") | ||||
|         ComplexBaseField = _import_class("ComplexBaseField") | ||||
|         ReferenceField = _import_class('ReferenceField') | ||||
|         GenericReferenceField = _import_class('GenericReferenceField') | ||||
|         ComplexBaseField = _import_class('ComplexBaseField') | ||||
|  | ||||
|         self.deref_fields = [ | ||||
|             k | ||||
|             for k, v in iteritems(self.cls._fields) | ||||
|             if isinstance(v, (ReferenceField, GenericReferenceField, ComplexBaseField)) | ||||
|         ] | ||||
|         self.deref_fields = [k for k, v in self.cls._fields.iteritems() | ||||
|                              if isinstance(v, (ReferenceField, | ||||
|                                                GenericReferenceField, | ||||
|                                                ComplexBaseField))] | ||||
|  | ||||
|     def __enter__(self): | ||||
|         """Change the objects default and _auto_dereference values.""" | ||||
| @@ -155,82 +143,66 @@ class no_sub_classes(object): | ||||
|         :param cls: the class to turn querying sub classes on | ||||
|         """ | ||||
|         self.cls = cls | ||||
|         self.cls_initial_subclasses = None | ||||
|  | ||||
|     def __enter__(self): | ||||
|         """Change the objects default and _auto_dereference values.""" | ||||
|         self.cls_initial_subclasses = self.cls._subclasses | ||||
|         self.cls._subclasses = (self.cls._class_name,) | ||||
|         self.cls._all_subclasses = self.cls._subclasses | ||||
|         self.cls._subclasses = (self.cls,) | ||||
|         return self.cls | ||||
|  | ||||
|     def __exit__(self, t, value, traceback): | ||||
|         """Reset the default and _auto_dereference values.""" | ||||
|         self.cls._subclasses = self.cls_initial_subclasses | ||||
|         self.cls._subclasses = self.cls._all_subclasses | ||||
|         delattr(self.cls, '_all_subclasses') | ||||
|         return self.cls | ||||
|  | ||||
|  | ||||
| class query_counter(object): | ||||
|     """Query_counter context manager to get the number of queries. | ||||
|     This works by updating the `profiling_level` of the database so that all queries get logged, | ||||
|     resetting the db.system.profile collection at the beginning of the context and counting the new entries. | ||||
|     """Query_counter context manager to get the number of queries.""" | ||||
|  | ||||
|     This was designed for debugging purpose. In fact it is a global counter so queries issued by other threads/processes | ||||
|     can interfere with it | ||||
|     def __init__(self): | ||||
|         """Construct the query_counter.""" | ||||
|         self.counter = 0 | ||||
|         self.db = get_db() | ||||
|  | ||||
|     Be aware that: | ||||
|     - Iterating over large amount of documents (>101) makes pymongo issue `getmore` queries to fetch the next batch of | ||||
|         documents (https://docs.mongodb.com/manual/tutorial/iterate-a-cursor/#cursor-batches) | ||||
|     - Some queries are ignored by default by the counter (killcursors, db.system.indexes) | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, alias=DEFAULT_CONNECTION_NAME): | ||||
|         """Construct the query_counter | ||||
|         """ | ||||
|         self.db = get_db(alias=alias) | ||||
|         self.initial_profiling_level = None | ||||
|         self._ctx_query_counter = 0  # number of queries issued by the context | ||||
|  | ||||
|         self._ignored_query = { | ||||
|             "ns": {"$ne": "%s.system.indexes" % self.db.name}, | ||||
|             "op": {"$ne": "killcursors"},  # MONGODB < 3.2 | ||||
|             "command.killCursors": {"$exists": False},  # MONGODB >= 3.2 | ||||
|         } | ||||
|  | ||||
|     def _turn_on_profiling(self): | ||||
|         self.initial_profiling_level = self.db.profiling_level() | ||||
|     def __enter__(self): | ||||
|         """On every with block we need to drop the profile collection.""" | ||||
|         self.db.set_profiling_level(0) | ||||
|         self.db.system.profile.drop() | ||||
|         self.db.set_profiling_level(2) | ||||
|  | ||||
|     def _resets_profiling(self): | ||||
|         self.db.set_profiling_level(self.initial_profiling_level) | ||||
|  | ||||
|     def __enter__(self): | ||||
|         self._turn_on_profiling() | ||||
|         return self | ||||
|  | ||||
|     def __exit__(self, t, value, traceback): | ||||
|         self._resets_profiling() | ||||
|         """Reset the profiling level.""" | ||||
|         self.db.set_profiling_level(0) | ||||
|  | ||||
|     def __eq__(self, value): | ||||
|         """== Compare querycounter.""" | ||||
|         counter = self._get_count() | ||||
|         return value == counter | ||||
|  | ||||
|     def __ne__(self, value): | ||||
|         """!= Compare querycounter.""" | ||||
|         return not self.__eq__(value) | ||||
|  | ||||
|     def __lt__(self, value): | ||||
|         """< Compare querycounter.""" | ||||
|         return self._get_count() < value | ||||
|  | ||||
|     def __le__(self, value): | ||||
|         """<= Compare querycounter.""" | ||||
|         return self._get_count() <= value | ||||
|  | ||||
|     def __gt__(self, value): | ||||
|         """> Compare querycounter.""" | ||||
|         return self._get_count() > value | ||||
|  | ||||
|     def __ge__(self, value): | ||||
|         """>= Compare querycounter.""" | ||||
|         return self._get_count() >= value | ||||
|  | ||||
|     def __int__(self): | ||||
|         """int representation.""" | ||||
|         return self._get_count() | ||||
|  | ||||
|     def __repr__(self): | ||||
| @@ -238,22 +210,8 @@ class query_counter(object): | ||||
|         return u"%s" % self._get_count() | ||||
|  | ||||
|     def _get_count(self): | ||||
|         """Get the number of queries by counting the current number of entries in db.system.profile | ||||
|         and substracting the queries issued by this context. In fact everytime this is called, 1 query is | ||||
|         issued so we need to balance that | ||||
|         """ | ||||
|         count = ( | ||||
|             count_documents(self.db.system.profile, self._ignored_query) | ||||
|             - self._ctx_query_counter | ||||
|         ) | ||||
|         self._ctx_query_counter += ( | ||||
|             1  # Account for the query we just issued to gather the information | ||||
|         ) | ||||
|         """Get the number of queries.""" | ||||
|         ignore_query = {'ns': {'$ne': '%s.system.indexes' % self.db.name}} | ||||
|         count = self.db.system.profile.find(ignore_query).count() - self.counter | ||||
|         self.counter += 1 | ||||
|         return count | ||||
|  | ||||
|  | ||||
| @contextmanager | ||||
| def set_write_concern(collection, write_concerns): | ||||
|     combined_concerns = dict(collection.write_concern.document.items()) | ||||
|     combined_concerns.update(write_concerns) | ||||
|     yield collection.with_options(write_concern=WriteConcern(**combined_concerns)) | ||||
|   | ||||
| @@ -1,15 +1,8 @@ | ||||
| from bson import DBRef, SON | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.base import ( | ||||
|     BaseDict, | ||||
|     BaseList, | ||||
|     EmbeddedDocumentList, | ||||
|     TopLevelDocumentMetaclass, | ||||
|     get_document, | ||||
| ) | ||||
| from mongoengine.base.datastructures import LazyReference | ||||
| from mongoengine.base import (BaseDict, BaseList, EmbeddedDocumentList, | ||||
|                               TopLevelDocumentMetaclass, get_document) | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.document import Document, EmbeddedDocument | ||||
| from mongoengine.fields import DictField, ListField, MapField, ReferenceField | ||||
| @@ -41,59 +34,43 @@ class DeReference(object): | ||||
|         self.max_depth = max_depth | ||||
|         doc_type = None | ||||
|  | ||||
|         if instance and isinstance( | ||||
|             instance, (Document, EmbeddedDocument, TopLevelDocumentMetaclass) | ||||
|         ): | ||||
|         if instance and isinstance(instance, (Document, EmbeddedDocument, | ||||
|                                               TopLevelDocumentMetaclass)): | ||||
|             doc_type = instance._fields.get(name) | ||||
|             while hasattr(doc_type, "field"): | ||||
|             while hasattr(doc_type, 'field'): | ||||
|                 doc_type = doc_type.field | ||||
|  | ||||
|             if isinstance(doc_type, ReferenceField): | ||||
|                 field = doc_type | ||||
|                 doc_type = doc_type.document_type | ||||
|                 is_list = not hasattr(items, "items") | ||||
|                 is_list = not hasattr(items, 'items') | ||||
|  | ||||
|                 if is_list and all([i.__class__ == doc_type for i in items]): | ||||
|                     return items | ||||
|                 elif not is_list and all( | ||||
|                     [i.__class__ == doc_type for i in items.values()] | ||||
|                 ): | ||||
|                         [i.__class__ == doc_type for i in items.values()]): | ||||
|                     return items | ||||
|                 elif not field.dbref: | ||||
|                     # We must turn the ObjectIds into DBRefs | ||||
|                     if not hasattr(items, 'items'): | ||||
|  | ||||
|                     # Recursively dig into the sub items of a list/dict | ||||
|                     # to turn the ObjectIds into DBRefs | ||||
|                     def _get_items_from_list(items): | ||||
|                         new_items = [] | ||||
|                         for v in items: | ||||
|                             value = v | ||||
|                             if isinstance(v, dict): | ||||
|                                 value = _get_items_from_dict(v) | ||||
|                             elif isinstance(v, list): | ||||
|                                 value = _get_items_from_list(v) | ||||
|                             elif not isinstance(v, (DBRef, Document)): | ||||
|                                 value = field.to_python(v) | ||||
|                             new_items.append(value) | ||||
|                         return new_items | ||||
|                         def _get_items(items): | ||||
|                             new_items = [] | ||||
|                             for v in items: | ||||
|                                 if isinstance(v, list): | ||||
|                                     new_items.append(_get_items(v)) | ||||
|                                 elif not isinstance(v, (DBRef, Document)): | ||||
|                                     new_items.append(field.to_python(v)) | ||||
|                                 else: | ||||
|                                     new_items.append(v) | ||||
|                             return new_items | ||||
|  | ||||
|                     def _get_items_from_dict(items): | ||||
|                         new_items = {} | ||||
|                         for k, v in iteritems(items): | ||||
|                             value = v | ||||
|                             if isinstance(v, list): | ||||
|                                 value = _get_items_from_list(v) | ||||
|                             elif isinstance(v, dict): | ||||
|                                 value = _get_items_from_dict(v) | ||||
|                             elif not isinstance(v, (DBRef, Document)): | ||||
|                                 value = field.to_python(v) | ||||
|                             new_items[k] = value | ||||
|                         return new_items | ||||
|  | ||||
|                     if not hasattr(items, "items"): | ||||
|                         items = _get_items_from_list(items) | ||||
|                         items = _get_items(items) | ||||
|                     else: | ||||
|                         items = _get_items_from_dict(items) | ||||
|                         items = { | ||||
|                             k: (v if isinstance(v, (DBRef, Document)) | ||||
|                                 else field.to_python(v)) | ||||
|                             for k, v in items.iteritems() | ||||
|                         } | ||||
|  | ||||
|         self.reference_map = self._find_references(items) | ||||
|         self.object_map = self._fetch_objects(doc_type=doc_type) | ||||
| @@ -120,40 +97,26 @@ class DeReference(object): | ||||
|         depth += 1 | ||||
|         for item in iterator: | ||||
|             if isinstance(item, (Document, EmbeddedDocument)): | ||||
|                 for field_name, field in iteritems(item._fields): | ||||
|                 for field_name, field in item._fields.iteritems(): | ||||
|                     v = item._data.get(field_name, None) | ||||
|                     if isinstance(v, LazyReference): | ||||
|                         # LazyReference inherits DBRef but should not be dereferenced here ! | ||||
|                         continue | ||||
|                     elif isinstance(v, DBRef): | ||||
|                     if isinstance(v, DBRef): | ||||
|                         reference_map.setdefault(field.document_type, set()).add(v.id) | ||||
|                     elif isinstance(v, (dict, SON)) and "_ref" in v: | ||||
|                         reference_map.setdefault(get_document(v["_cls"]), set()).add( | ||||
|                             v["_ref"].id | ||||
|                         ) | ||||
|                     elif isinstance(v, (dict, SON)) and '_ref' in v: | ||||
|                         reference_map.setdefault(get_document(v['_cls']), set()).add(v['_ref'].id) | ||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||
|                         field_cls = getattr( | ||||
|                             getattr(field, "field", None), "document_type", None | ||||
|                         ) | ||||
|                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None) | ||||
|                         references = self._find_references(v, depth) | ||||
|                         for key, refs in iteritems(references): | ||||
|                             if isinstance( | ||||
|                                 field_cls, (Document, TopLevelDocumentMetaclass) | ||||
|                             ): | ||||
|                         for key, refs in references.iteritems(): | ||||
|                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): | ||||
|                                 key = field_cls | ||||
|                             reference_map.setdefault(key, set()).update(refs) | ||||
|             elif isinstance(item, LazyReference): | ||||
|                 # LazyReference inherits DBRef but should not be dereferenced here ! | ||||
|                 continue | ||||
|             elif isinstance(item, DBRef): | ||||
|                 reference_map.setdefault(item.collection, set()).add(item.id) | ||||
|             elif isinstance(item, (dict, SON)) and "_ref" in item: | ||||
|                 reference_map.setdefault(get_document(item["_cls"]), set()).add( | ||||
|                     item["_ref"].id | ||||
|                 ) | ||||
|             elif isinstance(item, (dict, SON)) and '_ref' in item: | ||||
|                 reference_map.setdefault(get_document(item['_cls']), set()).add(item['_ref'].id) | ||||
|             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: | ||||
|                 references = self._find_references(item, depth - 1) | ||||
|                 for key, refs in iteritems(references): | ||||
|                 for key, refs in references.iteritems(): | ||||
|                     reference_map.setdefault(key, set()).update(refs) | ||||
|  | ||||
|         return reference_map | ||||
| @@ -162,44 +125,35 @@ class DeReference(object): | ||||
|         """Fetch all references and convert to their document objects | ||||
|         """ | ||||
|         object_map = {} | ||||
|         for collection, dbrefs in iteritems(self.reference_map): | ||||
|  | ||||
|             # we use getattr instead of hasattr because hasattr swallows any exception under python2 | ||||
|             # so it could hide nasty things without raising exceptions (cfr bug #1688)) | ||||
|             ref_document_cls_exists = getattr(collection, "objects", None) is not None | ||||
|  | ||||
|             if ref_document_cls_exists: | ||||
|         for collection, dbrefs in self.reference_map.iteritems(): | ||||
|             if hasattr(collection, 'objects'):  # We have a document class for the refs | ||||
|                 col_name = collection._get_collection_name() | ||||
|                 refs = [ | ||||
|                     dbref for dbref in dbrefs if (col_name, dbref) not in object_map | ||||
|                 ] | ||||
|                 refs = [dbref for dbref in dbrefs | ||||
|                         if (col_name, dbref) not in object_map] | ||||
|                 references = collection.objects.in_bulk(refs) | ||||
|                 for key, doc in iteritems(references): | ||||
|                 for key, doc in references.iteritems(): | ||||
|                     object_map[(col_name, key)] = doc | ||||
|             else:  # Generic reference: use the refs data to convert to document | ||||
|                 if isinstance(doc_type, (ListField, DictField, MapField)): | ||||
|                 if isinstance(doc_type, (ListField, DictField, MapField,)): | ||||
|                     continue | ||||
|  | ||||
|                 refs = [ | ||||
|                     dbref for dbref in dbrefs if (collection, dbref) not in object_map | ||||
|                 ] | ||||
|                 refs = [dbref for dbref in dbrefs | ||||
|                         if (collection, dbref) not in object_map] | ||||
|  | ||||
|                 if doc_type: | ||||
|                     references = doc_type._get_db()[collection].find( | ||||
|                         {"_id": {"$in": refs}} | ||||
|                     ) | ||||
|                     references = doc_type._get_db()[collection].find({'_id': {'$in': refs}}) | ||||
|                     for ref in references: | ||||
|                         doc = doc_type._from_son(ref) | ||||
|                         object_map[(collection, doc.id)] = doc | ||||
|                 else: | ||||
|                     references = get_db()[collection].find({"_id": {"$in": refs}}) | ||||
|                     references = get_db()[collection].find({'_id': {'$in': refs}}) | ||||
|                     for ref in references: | ||||
|                         if "_cls" in ref: | ||||
|                             doc = get_document(ref["_cls"])._from_son(ref) | ||||
|                         if '_cls' in ref: | ||||
|                             doc = get_document(ref['_cls'])._from_son(ref) | ||||
|                         elif doc_type is None: | ||||
|                             doc = get_document( | ||||
|                                 "".join(x.capitalize() for x in collection.split("_")) | ||||
|                             )._from_son(ref) | ||||
|                                 ''.join(x.capitalize() | ||||
|                                         for x in collection.split('_')))._from_son(ref) | ||||
|                         else: | ||||
|                             doc = doc_type._from_son(ref) | ||||
|                         object_map[(collection, doc.id)] = doc | ||||
| @@ -227,20 +181,19 @@ class DeReference(object): | ||||
|                     return BaseList(items, instance, name) | ||||
|  | ||||
|         if isinstance(items, (dict, SON)): | ||||
|             if "_ref" in items: | ||||
|             if '_ref' in items: | ||||
|                 return self.object_map.get( | ||||
|                     (items["_ref"].collection, items["_ref"].id), items | ||||
|                 ) | ||||
|             elif "_cls" in items: | ||||
|                 doc = get_document(items["_cls"])._from_son(items) | ||||
|                 _cls = doc._data.pop("_cls", None) | ||||
|                 del items["_cls"] | ||||
|                     (items['_ref'].collection, items['_ref'].id), items) | ||||
|             elif '_cls' in items: | ||||
|                 doc = get_document(items['_cls'])._from_son(items) | ||||
|                 _cls = doc._data.pop('_cls', None) | ||||
|                 del items['_cls'] | ||||
|                 doc._data = self._attach_objects(doc._data, depth, doc, None) | ||||
|                 if _cls is not None: | ||||
|                     doc._data["_cls"] = _cls | ||||
|                     doc._data['_cls'] = _cls | ||||
|                 return doc | ||||
|  | ||||
|         if not hasattr(items, "items"): | ||||
|         if not hasattr(items, 'items'): | ||||
|             is_list = True | ||||
|             list_type = BaseList | ||||
|             if isinstance(items, EmbeddedDocumentList): | ||||
| @@ -250,7 +203,7 @@ class DeReference(object): | ||||
|             data = [] | ||||
|         else: | ||||
|             is_list = False | ||||
|             iterator = iteritems(items) | ||||
|             iterator = items.iteritems() | ||||
|             data = {} | ||||
|  | ||||
|         depth += 1 | ||||
| @@ -267,25 +220,17 @@ class DeReference(object): | ||||
|                     v = data[k]._data.get(field_name, None) | ||||
|                     if isinstance(v, DBRef): | ||||
|                         data[k]._data[field_name] = self.object_map.get( | ||||
|                             (v.collection, v.id), v | ||||
|                         ) | ||||
|                     elif isinstance(v, (dict, SON)) and "_ref" in v: | ||||
|                             (v.collection, v.id), v) | ||||
|                     elif isinstance(v, (dict, SON)) and '_ref' in v: | ||||
|                         data[k]._data[field_name] = self.object_map.get( | ||||
|                             (v["_ref"].collection, v["_ref"].id), v | ||||
|                         ) | ||||
|                             (v['_ref'].collection, v['_ref'].id), v) | ||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||
|                         item_name = six.text_type("{0}.{1}.{2}").format( | ||||
|                             name, k, field_name | ||||
|                         ) | ||||
|                         data[k]._data[field_name] = self._attach_objects( | ||||
|                             v, depth, instance=instance, name=item_name | ||||
|                         ) | ||||
|                         item_name = six.text_type('{0}.{1}.{2}').format(name, k, field_name) | ||||
|                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=item_name) | ||||
|             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||
|                 item_name = "%s.%s" % (name, k) if name else name | ||||
|                 data[k] = self._attach_objects( | ||||
|                     v, depth - 1, instance=instance, name=item_name | ||||
|                 ) | ||||
|             elif isinstance(v, DBRef) and hasattr(v, "id"): | ||||
|                 item_name = '%s.%s' % (name, k) if name else name | ||||
|                 data[k] = self._attach_objects(v, depth - 1, instance=instance, name=item_name) | ||||
|             elif hasattr(v, 'id'): | ||||
|                 data[k] = self.object_map.get((v.collection, v.id), v) | ||||
|  | ||||
|         if instance and name: | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,23 +1,11 @@ | ||||
| from collections import defaultdict | ||||
|  | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| __all__ = ( | ||||
|     "NotRegistered", | ||||
|     "InvalidDocumentError", | ||||
|     "LookUpError", | ||||
|     "DoesNotExist", | ||||
|     "MultipleObjectsReturned", | ||||
|     "InvalidQueryError", | ||||
|     "OperationError", | ||||
|     "NotUniqueError", | ||||
|     "BulkWriteError", | ||||
|     "FieldDoesNotExist", | ||||
|     "ValidationError", | ||||
|     "SaveConditionError", | ||||
|     "DeprecatedError", | ||||
| ) | ||||
| __all__ = ('NotRegistered', 'InvalidDocumentError', 'LookUpError', | ||||
|            'DoesNotExist', 'MultipleObjectsReturned', 'InvalidQueryError', | ||||
|            'OperationError', 'NotUniqueError', 'FieldDoesNotExist', | ||||
|            'ValidationError', 'SaveConditionError') | ||||
|  | ||||
|  | ||||
| class NotRegistered(Exception): | ||||
| @@ -52,10 +40,6 @@ class NotUniqueError(OperationError): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class BulkWriteError(OperationError): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class SaveConditionError(OperationError): | ||||
|     pass | ||||
|  | ||||
| @@ -86,25 +70,24 @@ class ValidationError(AssertionError): | ||||
|     field_name = None | ||||
|     _message = None | ||||
|  | ||||
|     def __init__(self, message="", **kwargs): | ||||
|         super(ValidationError, self).__init__(message) | ||||
|         self.errors = kwargs.get("errors", {}) | ||||
|         self.field_name = kwargs.get("field_name") | ||||
|     def __init__(self, message='', **kwargs): | ||||
|         self.errors = kwargs.get('errors', {}) | ||||
|         self.field_name = kwargs.get('field_name') | ||||
|         self.message = message | ||||
|  | ||||
|     def __str__(self): | ||||
|         return six.text_type(self.message) | ||||
|  | ||||
|     def __repr__(self): | ||||
|         return "%s(%s,)" % (self.__class__.__name__, self.message) | ||||
|         return '%s(%s,)' % (self.__class__.__name__, self.message) | ||||
|  | ||||
|     def __getattribute__(self, name): | ||||
|         message = super(ValidationError, self).__getattribute__(name) | ||||
|         if name == "message": | ||||
|         if name == 'message': | ||||
|             if self.field_name: | ||||
|                 message = "%s" % message | ||||
|                 message = '%s' % message | ||||
|             if self.errors: | ||||
|                 message = "%s(%s)" % (message, self._format_errors()) | ||||
|                 message = '%s(%s)' % (message, self._format_errors()) | ||||
|         return message | ||||
|  | ||||
|     def _get_message(self): | ||||
| @@ -125,8 +108,11 @@ class ValidationError(AssertionError): | ||||
|  | ||||
|         def build_dict(source): | ||||
|             errors_dict = {} | ||||
|             if not source: | ||||
|                 return errors_dict | ||||
|  | ||||
|             if isinstance(source, dict): | ||||
|                 for field_name, error in iteritems(source): | ||||
|                 for field_name, error in source.iteritems(): | ||||
|                     errors_dict[field_name] = build_dict(error) | ||||
|             elif isinstance(source, ValidationError) and source.errors: | ||||
|                 return build_dict(source.errors) | ||||
| @@ -143,22 +129,17 @@ class ValidationError(AssertionError): | ||||
|     def _format_errors(self): | ||||
|         """Returns a string listing all errors within a document""" | ||||
|  | ||||
|         def generate_key(value, prefix=""): | ||||
|         def generate_key(value, prefix=''): | ||||
|             if isinstance(value, list): | ||||
|                 value = " ".join([generate_key(k) for k in value]) | ||||
|                 value = ' '.join([generate_key(k) for k in value]) | ||||
|             elif isinstance(value, dict): | ||||
|                 value = " ".join([generate_key(v, k) for k, v in iteritems(value)]) | ||||
|                 value = ' '.join( | ||||
|                     [generate_key(v, k) for k, v in value.iteritems()]) | ||||
|  | ||||
|             results = "%s.%s" % (prefix, value) if prefix else value | ||||
|             results = '%s.%s' % (prefix, value) if prefix else value | ||||
|             return results | ||||
|  | ||||
|         error_dict = defaultdict(list) | ||||
|         for k, v in iteritems(self.to_dict()): | ||||
|         for k, v in self.to_dict().iteritems(): | ||||
|             error_dict[generate_key(v)].append(k) | ||||
|         return " ".join(["%s: %s" % (k, v) for k, v in iteritems(error_dict)]) | ||||
|  | ||||
|  | ||||
| class DeprecatedError(Exception): | ||||
|     """Raise when a user uses a feature that has been Deprecated""" | ||||
|  | ||||
|     pass | ||||
|         return ' '.join(['%s: %s' % (k, v) for k, v in error_dict.iteritems()]) | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,19 +0,0 @@ | ||||
| """ | ||||
| Helper functions, constants, and types to aid with MongoDB version support | ||||
| """ | ||||
| from mongoengine.connection import get_connection | ||||
|  | ||||
|  | ||||
| # Constant that can be used to compare the version retrieved with | ||||
| # get_mongodb_version() | ||||
| MONGODB_34 = (3, 4) | ||||
| MONGODB_36 = (3, 6) | ||||
|  | ||||
|  | ||||
| def get_mongodb_version(): | ||||
|     """Return the version of the connected mongoDB (first 2 digits) | ||||
|  | ||||
|     :return: tuple(int, int) | ||||
|     """ | ||||
|     version_list = get_connection().server_info()["versionArray"][:2]  # e.g: (3, 2) | ||||
|     return tuple(version_list) | ||||
| @@ -1,32 +0,0 @@ | ||||
| """ | ||||
| Helper functions, constants, and types to aid with PyMongo v2.7 - v3.x support. | ||||
| """ | ||||
| import pymongo | ||||
|  | ||||
| _PYMONGO_37 = (3, 7) | ||||
|  | ||||
| PYMONGO_VERSION = tuple(pymongo.version_tuple[:2]) | ||||
|  | ||||
| IS_PYMONGO_GTE_37 = PYMONGO_VERSION >= _PYMONGO_37 | ||||
|  | ||||
|  | ||||
| def count_documents(collection, filter): | ||||
|     """Pymongo>3.7 deprecates count in favour of count_documents""" | ||||
|     if IS_PYMONGO_GTE_37: | ||||
|         return collection.count_documents(filter) | ||||
|     else: | ||||
|         count = collection.find(filter).count() | ||||
|     return count | ||||
|  | ||||
|  | ||||
| def list_collection_names(db, include_system_collections=False): | ||||
|     """Pymongo>3.7 deprecates collection_names in favour of list_collection_names""" | ||||
|     if IS_PYMONGO_GTE_37: | ||||
|         collections = db.list_collection_names() | ||||
|     else: | ||||
|         collections = db.collection_names() | ||||
|  | ||||
|     if not include_system_collections: | ||||
|         collections = [c for c in collections if not c.startswith("system.")] | ||||
|  | ||||
|     return collections | ||||
| @@ -1,8 +1,17 @@ | ||||
| """ | ||||
| Helper functions, constants, and types to aid with Python v2.7 - v3.x support | ||||
| Helper functions, constants, and types to aid with Python v2.7 - v3.x and | ||||
| PyMongo v2.7 - v3.x support. | ||||
| """ | ||||
| import pymongo | ||||
| import six | ||||
|  | ||||
|  | ||||
| if pymongo.version_tuple[0] < 3: | ||||
|     IS_PYMONGO_3 = False | ||||
| else: | ||||
|     IS_PYMONGO_3 = True | ||||
|  | ||||
|  | ||||
| # six.BytesIO resolves to StringIO.StringIO in Py2 and io.BytesIO in Py3. | ||||
| StringIO = six.BytesIO | ||||
|  | ||||
| @@ -14,10 +23,3 @@ if not six.PY3: | ||||
|         pass | ||||
|     else: | ||||
|         StringIO = cStringIO.StringIO | ||||
|  | ||||
|  | ||||
| if six.PY3: | ||||
|     from collections.abc import Hashable | ||||
| else: | ||||
|     # raises DeprecationWarnings in Python >=3.7 | ||||
|     from collections import Hashable | ||||
|   | ||||
| @@ -7,22 +7,11 @@ from mongoengine.queryset.visitor import * | ||||
|  | ||||
| # Expose just the public subset of all imported objects and constants. | ||||
| __all__ = ( | ||||
|     "QuerySet", | ||||
|     "QuerySetNoCache", | ||||
|     "Q", | ||||
|     "queryset_manager", | ||||
|     "QuerySetManager", | ||||
|     "QueryFieldList", | ||||
|     "DO_NOTHING", | ||||
|     "NULLIFY", | ||||
|     "CASCADE", | ||||
|     "DENY", | ||||
|     "PULL", | ||||
|     'QuerySet', 'QuerySetNoCache', 'Q', 'queryset_manager', 'QuerySetManager', | ||||
|     'QueryFieldList', 'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL', | ||||
|  | ||||
|     # Errors that might be related to a queryset, mostly here for backward | ||||
|     # compatibility | ||||
|     "DoesNotExist", | ||||
|     "InvalidQueryError", | ||||
|     "MultipleObjectsReturned", | ||||
|     "NotUniqueError", | ||||
|     "OperationError", | ||||
|     'DoesNotExist', 'InvalidQueryError', 'MultipleObjectsReturned', | ||||
|     'NotUniqueError', 'OperationError', | ||||
| ) | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,15 +1,12 @@ | ||||
| __all__ = ("QueryFieldList",) | ||||
| __all__ = ('QueryFieldList',) | ||||
|  | ||||
|  | ||||
| class QueryFieldList(object): | ||||
|     """Object that handles combinations of .only() and .exclude() calls""" | ||||
|  | ||||
|     ONLY = 1 | ||||
|     EXCLUDE = 0 | ||||
|  | ||||
|     def __init__( | ||||
|         self, fields=None, value=ONLY, always_include=None, _only_called=False | ||||
|     ): | ||||
|     def __init__(self, fields=None, value=ONLY, always_include=None, _only_called=False): | ||||
|         """The QueryFieldList builder | ||||
|  | ||||
|         :param fields: A list of fields used in `.only()` or `.exclude()` | ||||
| @@ -52,7 +49,7 @@ class QueryFieldList(object): | ||||
|             self.fields = f.fields - self.fields | ||||
|             self._clean_slice() | ||||
|  | ||||
|         if "_id" in f.fields: | ||||
|         if '_id' in f.fields: | ||||
|             self._id = f.value | ||||
|  | ||||
|         if self.always_include: | ||||
| @@ -62,21 +59,19 @@ class QueryFieldList(object): | ||||
|             else: | ||||
|                 self.fields -= self.always_include | ||||
|  | ||||
|         if getattr(f, "_only_called", False): | ||||
|         if getattr(f, '_only_called', False): | ||||
|             self._only_called = True | ||||
|         return self | ||||
|  | ||||
|     def __bool__(self): | ||||
|     def __nonzero__(self): | ||||
|         return bool(self.fields) | ||||
|  | ||||
|     __nonzero__ = __bool__  # For Py2 support | ||||
|  | ||||
|     def as_dict(self): | ||||
|         field_list = {field: self.value for field in self.fields} | ||||
|         if self.slice: | ||||
|             field_list.update(self.slice) | ||||
|         if self._id is not None: | ||||
|             field_list["_id"] = self._id | ||||
|             field_list['_id'] = self._id | ||||
|         return field_list | ||||
|  | ||||
|     def reset(self): | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| from functools import partial | ||||
| from mongoengine.queryset.queryset import QuerySet | ||||
|  | ||||
| __all__ = ("queryset_manager", "QuerySetManager") | ||||
| __all__ = ('queryset_manager', 'QuerySetManager') | ||||
|  | ||||
|  | ||||
| class QuerySetManager(object): | ||||
| @@ -33,10 +33,10 @@ class QuerySetManager(object): | ||||
|             return self | ||||
|  | ||||
|         # owner is the document that contains the QuerySetManager | ||||
|         queryset_class = owner._meta.get("queryset_class", self.default) | ||||
|         queryset_class = owner._meta.get('queryset_class', self.default) | ||||
|         queryset = queryset_class(owner, owner._get_collection()) | ||||
|         if self.get_queryset: | ||||
|             arg_count = self.get_queryset.__code__.co_argcount | ||||
|             arg_count = self.get_queryset.func_code.co_argcount | ||||
|             if arg_count == 1: | ||||
|                 queryset = self.get_queryset(queryset) | ||||
|             elif arg_count == 2: | ||||
|   | ||||
| @@ -1,24 +1,9 @@ | ||||
| import six | ||||
|  | ||||
| from mongoengine.errors import OperationError | ||||
| from mongoengine.queryset.base import ( | ||||
|     BaseQuerySet, | ||||
|     CASCADE, | ||||
|     DENY, | ||||
|     DO_NOTHING, | ||||
|     NULLIFY, | ||||
|     PULL, | ||||
| ) | ||||
| from mongoengine.queryset.base import (BaseQuerySet, CASCADE, DENY, DO_NOTHING, | ||||
|                                        NULLIFY, PULL) | ||||
|  | ||||
| __all__ = ( | ||||
|     "QuerySet", | ||||
|     "QuerySetNoCache", | ||||
|     "DO_NOTHING", | ||||
|     "NULLIFY", | ||||
|     "CASCADE", | ||||
|     "DENY", | ||||
|     "PULL", | ||||
| ) | ||||
| __all__ = ('QuerySet', 'QuerySetNoCache', 'DO_NOTHING', 'NULLIFY', 'CASCADE', | ||||
|            'DENY', 'PULL') | ||||
|  | ||||
| # The maximum number of items to display in a QuerySet.__repr__ | ||||
| REPR_OUTPUT_SIZE = 20 | ||||
| @@ -70,12 +55,12 @@ class QuerySet(BaseQuerySet): | ||||
|     def __repr__(self): | ||||
|         """Provide a string representation of the QuerySet""" | ||||
|         if self._iter: | ||||
|             return ".. queryset mid-iteration .." | ||||
|             return '.. queryset mid-iteration ..' | ||||
|  | ||||
|         self._populate_cache() | ||||
|         data = self._result_cache[: REPR_OUTPUT_SIZE + 1] | ||||
|         data = self._result_cache[:REPR_OUTPUT_SIZE + 1] | ||||
|         if len(data) > REPR_OUTPUT_SIZE: | ||||
|             data[-1] = "...(remaining elements truncated)..." | ||||
|             data[-1] = '...(remaining elements truncated)...' | ||||
|         return repr(data) | ||||
|  | ||||
|     def _iter_results(self): | ||||
| @@ -102,10 +87,10 @@ class QuerySet(BaseQuerySet): | ||||
|                 yield self._result_cache[pos] | ||||
|                 pos += 1 | ||||
|  | ||||
|             # return if we already established there were no more | ||||
|             # Raise StopIteration if we already established there were no more | ||||
|             # docs in the db cursor. | ||||
|             if not self._has_more: | ||||
|                 return | ||||
|                 raise StopIteration | ||||
|  | ||||
|             # Otherwise, populate more of the cache and repeat. | ||||
|             if len(self._result_cache) <= pos: | ||||
| @@ -127,8 +112,8 @@ class QuerySet(BaseQuerySet): | ||||
|         # Pull in ITER_CHUNK_SIZE docs from the database and store them in | ||||
|         # the result cache. | ||||
|         try: | ||||
|             for _ in six.moves.range(ITER_CHUNK_SIZE): | ||||
|                 self._result_cache.append(six.next(self)) | ||||
|             for _ in xrange(ITER_CHUNK_SIZE): | ||||
|                 self._result_cache.append(self.next()) | ||||
|         except StopIteration: | ||||
|             # Getting this exception means there are no more docs in the | ||||
|             # db cursor. Set _has_more to False so that we can use that | ||||
| @@ -156,9 +141,10 @@ class QuerySet(BaseQuerySet): | ||||
|         .. versionadded:: 0.8.3 Convert to non caching queryset | ||||
|         """ | ||||
|         if self._result_cache is not None: | ||||
|             raise OperationError("QuerySet already cached") | ||||
|             raise OperationError('QuerySet already cached') | ||||
|  | ||||
|         return self._clone_into(QuerySetNoCache(self._document, self._collection)) | ||||
|         return self._clone_into(QuerySetNoCache(self._document, | ||||
|                                                 self._collection)) | ||||
|  | ||||
|  | ||||
| class QuerySetNoCache(BaseQuerySet): | ||||
| @@ -177,17 +163,17 @@ class QuerySetNoCache(BaseQuerySet): | ||||
|         .. versionchanged:: 0.6.13 Now doesnt modify the cursor | ||||
|         """ | ||||
|         if self._iter: | ||||
|             return ".. queryset mid-iteration .." | ||||
|             return '.. queryset mid-iteration ..' | ||||
|  | ||||
|         data = [] | ||||
|         for _ in six.moves.range(REPR_OUTPUT_SIZE + 1): | ||||
|         for _ in xrange(REPR_OUTPUT_SIZE + 1): | ||||
|             try: | ||||
|                 data.append(six.next(self)) | ||||
|                 data.append(self.next()) | ||||
|             except StopIteration: | ||||
|                 break | ||||
|  | ||||
|         if len(data) > REPR_OUTPUT_SIZE: | ||||
|             data[-1] = "...(remaining elements truncated)..." | ||||
|             data[-1] = '...(remaining elements truncated)...' | ||||
|  | ||||
|         self.rewind() | ||||
|         return repr(data) | ||||
| @@ -198,3 +184,10 @@ class QuerySetNoCache(BaseQuerySet): | ||||
|             queryset = self.clone() | ||||
|         queryset.rewind() | ||||
|         return queryset | ||||
|  | ||||
|  | ||||
| class QuerySetNoDeRef(QuerySet): | ||||
|     """Special no_dereference QuerySet""" | ||||
|  | ||||
|     def __dereference(items, max_depth=1, instance=None, name=None): | ||||
|         return items | ||||
|   | ||||
| @@ -4,60 +4,28 @@ from bson import ObjectId, SON | ||||
| from bson.dbref import DBRef | ||||
| import pymongo | ||||
| import six | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine.base import UPDATE_OPERATORS | ||||
| from mongoengine.common import _import_class | ||||
| from mongoengine.connection import get_connection | ||||
| from mongoengine.errors import InvalidQueryError | ||||
| from mongoengine.python_support import IS_PYMONGO_3 | ||||
|  | ||||
| __all__ = ("query", "update") | ||||
| __all__ = ('query', 'update') | ||||
|  | ||||
| COMPARISON_OPERATORS = ( | ||||
|     "ne", | ||||
|     "gt", | ||||
|     "gte", | ||||
|     "lt", | ||||
|     "lte", | ||||
|     "in", | ||||
|     "nin", | ||||
|     "mod", | ||||
|     "all", | ||||
|     "size", | ||||
|     "exists", | ||||
|     "not", | ||||
|     "elemMatch", | ||||
|     "type", | ||||
| ) | ||||
| GEO_OPERATORS = ( | ||||
|     "within_distance", | ||||
|     "within_spherical_distance", | ||||
|     "within_box", | ||||
|     "within_polygon", | ||||
|     "near", | ||||
|     "near_sphere", | ||||
|     "max_distance", | ||||
|     "min_distance", | ||||
|     "geo_within", | ||||
|     "geo_within_box", | ||||
|     "geo_within_polygon", | ||||
|     "geo_within_center", | ||||
|     "geo_within_sphere", | ||||
|     "geo_intersects", | ||||
| ) | ||||
| STRING_OPERATORS = ( | ||||
|     "contains", | ||||
|     "icontains", | ||||
|     "startswith", | ||||
|     "istartswith", | ||||
|     "endswith", | ||||
|     "iendswith", | ||||
|     "exact", | ||||
|     "iexact", | ||||
| ) | ||||
| CUSTOM_OPERATORS = ("match",) | ||||
| MATCH_OPERATORS = ( | ||||
|     COMPARISON_OPERATORS + GEO_OPERATORS + STRING_OPERATORS + CUSTOM_OPERATORS | ||||
| ) | ||||
| COMPARISON_OPERATORS = ('ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', | ||||
|                         'all', 'size', 'exists', 'not', 'elemMatch', 'type') | ||||
| GEO_OPERATORS = ('within_distance', 'within_spherical_distance', | ||||
|                  'within_box', 'within_polygon', 'near', 'near_sphere', | ||||
|                  'max_distance', 'min_distance', 'geo_within', 'geo_within_box', | ||||
|                  'geo_within_polygon', 'geo_within_center', | ||||
|                  'geo_within_sphere', 'geo_intersects') | ||||
| STRING_OPERATORS = ('contains', 'icontains', 'startswith', | ||||
|                     'istartswith', 'endswith', 'iendswith', | ||||
|                     'exact', 'iexact') | ||||
| CUSTOM_OPERATORS = ('match',) | ||||
| MATCH_OPERATORS = (COMPARISON_OPERATORS + GEO_OPERATORS + | ||||
|                    STRING_OPERATORS + CUSTOM_OPERATORS) | ||||
|  | ||||
|  | ||||
| # TODO make this less complex | ||||
| @@ -66,11 +34,11 @@ def query(_doc_cls=None, **kwargs): | ||||
|     mongo_query = {} | ||||
|     merge_query = defaultdict(list) | ||||
|     for key, value in sorted(kwargs.items()): | ||||
|         if key == "__raw__": | ||||
|         if key == '__raw__': | ||||
|             mongo_query.update(value) | ||||
|             continue | ||||
|  | ||||
|         parts = key.rsplit("__") | ||||
|         parts = key.rsplit('__') | ||||
|         indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()] | ||||
|         parts = [part for part in parts if not part.isdigit()] | ||||
|         # Check for an operator and transform to mongo-style if there is | ||||
| @@ -79,11 +47,11 @@ def query(_doc_cls=None, **kwargs): | ||||
|             op = parts.pop() | ||||
|  | ||||
|         # Allow to escape operator-like field name by __ | ||||
|         if len(parts) > 1 and parts[-1] == "": | ||||
|         if len(parts) > 1 and parts[-1] == '': | ||||
|             parts.pop() | ||||
|  | ||||
|         negate = False | ||||
|         if len(parts) > 1 and parts[-1] == "not": | ||||
|         if len(parts) > 1 and parts[-1] == 'not': | ||||
|             parts.pop() | ||||
|             negate = True | ||||
|  | ||||
| @@ -95,8 +63,8 @@ def query(_doc_cls=None, **kwargs): | ||||
|                 raise InvalidQueryError(e) | ||||
|             parts = [] | ||||
|  | ||||
|             CachedReferenceField = _import_class("CachedReferenceField") | ||||
|             GenericReferenceField = _import_class("GenericReferenceField") | ||||
|             CachedReferenceField = _import_class('CachedReferenceField') | ||||
|             GenericReferenceField = _import_class('GenericReferenceField') | ||||
|  | ||||
|             cleaned_fields = [] | ||||
|             for field in fields: | ||||
| @@ -106,7 +74,7 @@ def query(_doc_cls=None, **kwargs): | ||||
|                     append_field = False | ||||
|                 # is last and CachedReferenceField | ||||
|                 elif isinstance(field, CachedReferenceField) and fields[-1] == field: | ||||
|                     parts.append("%s._id" % field.db_field) | ||||
|                     parts.append('%s._id' % field.db_field) | ||||
|                 else: | ||||
|                     parts.append(field.db_field) | ||||
|  | ||||
| @@ -116,17 +84,38 @@ def query(_doc_cls=None, **kwargs): | ||||
|             # Convert value to proper value | ||||
|             field = cleaned_fields[-1] | ||||
|  | ||||
|             singular_ops = [None, "ne", "gt", "gte", "lt", "lte", "not"] | ||||
|             singular_ops = [None, 'ne', 'gt', 'gte', 'lt', 'lte', 'not'] | ||||
|             singular_ops += STRING_OPERATORS | ||||
|             if op in singular_ops: | ||||
|                 value = field.prepare_query_value(op, value) | ||||
|                 if isinstance(field, six.string_types): | ||||
|                     if (op in STRING_OPERATORS and | ||||
|                             isinstance(value, six.string_types)): | ||||
|                         StringField = _import_class('StringField') | ||||
|                         value = StringField.prepare_query_value(op, value) | ||||
|                     else: | ||||
|                         value = field | ||||
|                 else: | ||||
|                     value = field.prepare_query_value(op, value) | ||||
|  | ||||
|                 if isinstance(field, CachedReferenceField) and value: | ||||
|                     value = value["_id"] | ||||
|                     if isinstance(field, CachedReferenceField) and value: | ||||
|                         value = value['_id'] | ||||
|  | ||||
|             elif op in ("in", "nin", "all", "near") and not isinstance(value, dict): | ||||
|                 # Raise an error if the in/nin/all/near param is not iterable. | ||||
|                 value = _prepare_query_for_iterable(field, op, value) | ||||
|             elif op in ('in', 'nin', 'all', 'near') and not isinstance(value, dict): | ||||
|                 # Raise an error if the in/nin/all/near param is not iterable. We need a | ||||
|                 # special check for BaseDocument, because - although it's iterable - using | ||||
|                 # it as such in the context of this method is most definitely a mistake. | ||||
|                 BaseDocument = _import_class('BaseDocument') | ||||
|                 if isinstance(value, BaseDocument): | ||||
|                     raise TypeError("When using the `in`, `nin`, `all`, or " | ||||
|                                     "`near`-operators you can\'t use a " | ||||
|                                     "`Document`, you must wrap your object " | ||||
|                                     "in a list (object -> [object]).") | ||||
|                 elif not hasattr(value, '__iter__'): | ||||
|                     raise TypeError("The `in`, `nin`, `all`, or " | ||||
|                                     "`near`-operators must be applied to an " | ||||
|                                     "iterable (e.g. a list).") | ||||
|                 else: | ||||
|                     value = [field.prepare_query_value(op, v) for v in value] | ||||
|  | ||||
|             # If we're querying a GenericReferenceField, we need to alter the | ||||
|             # key depending on the value: | ||||
| @@ -134,77 +123,73 @@ def query(_doc_cls=None, **kwargs): | ||||
|             # * If the value is an ObjectId, the key should be "field_name._ref.$id". | ||||
|             if isinstance(field, GenericReferenceField): | ||||
|                 if isinstance(value, DBRef): | ||||
|                     parts[-1] += "._ref" | ||||
|                     parts[-1] += '._ref' | ||||
|                 elif isinstance(value, ObjectId): | ||||
|                     parts[-1] += "._ref.$id" | ||||
|                     parts[-1] += '._ref.$id' | ||||
|  | ||||
|         # if op and op not in COMPARISON_OPERATORS: | ||||
|         if op: | ||||
|             if op in GEO_OPERATORS: | ||||
|                 value = _geo_operator(field, op, value) | ||||
|             elif op in ("match", "elemMatch"): | ||||
|                 ListField = _import_class("ListField") | ||||
|                 EmbeddedDocumentField = _import_class("EmbeddedDocumentField") | ||||
|             elif op in ('match', 'elemMatch'): | ||||
|                 ListField = _import_class('ListField') | ||||
|                 EmbeddedDocumentField = _import_class('EmbeddedDocumentField') | ||||
|                 if ( | ||||
|                     isinstance(value, dict) | ||||
|                     and isinstance(field, ListField) | ||||
|                     and isinstance(field.field, EmbeddedDocumentField) | ||||
|                     isinstance(value, dict) and | ||||
|                     isinstance(field, ListField) and | ||||
|                     isinstance(field.field, EmbeddedDocumentField) | ||||
|                 ): | ||||
|                     value = query(field.field.document_type, **value) | ||||
|                 else: | ||||
|                     value = field.prepare_query_value(op, value) | ||||
|                 value = {"$elemMatch": value} | ||||
|                 value = {'$elemMatch': value} | ||||
|             elif op in CUSTOM_OPERATORS: | ||||
|                 NotImplementedError( | ||||
|                     'Custom method "%s" has not ' "been implemented" % op | ||||
|                 ) | ||||
|                 NotImplementedError('Custom method "%s" has not ' | ||||
|                                     'been implemented' % op) | ||||
|             elif op not in STRING_OPERATORS: | ||||
|                 value = {"$" + op: value} | ||||
|                 value = {'$' + op: value} | ||||
|  | ||||
|         if negate: | ||||
|             value = {"$not": value} | ||||
|             value = {'$not': value} | ||||
|  | ||||
|         for i, part in indices: | ||||
|             parts.insert(i, part) | ||||
|  | ||||
|         key = ".".join(parts) | ||||
|         key = '.'.join(parts) | ||||
|  | ||||
|         if op is None or key not in mongo_query: | ||||
|             mongo_query[key] = value | ||||
|         elif key in mongo_query: | ||||
|             if isinstance(mongo_query[key], dict) and isinstance(value, dict): | ||||
|             if isinstance(mongo_query[key], dict): | ||||
|                 mongo_query[key].update(value) | ||||
|                 # $max/minDistance needs to come last - convert to SON | ||||
|                 value_dict = mongo_query[key] | ||||
|                 if ("$maxDistance" in value_dict or "$minDistance" in value_dict) and ( | ||||
|                     "$near" in value_dict or "$nearSphere" in value_dict | ||||
|                 ): | ||||
|                 if ('$maxDistance' in value_dict or '$minDistance' in value_dict) and \ | ||||
|                         ('$near' in value_dict or '$nearSphere' in value_dict): | ||||
|                     value_son = SON() | ||||
|                     for k, v in iteritems(value_dict): | ||||
|                         if k == "$maxDistance" or k == "$minDistance": | ||||
|                     for k, v in value_dict.iteritems(): | ||||
|                         if k == '$maxDistance' or k == '$minDistance': | ||||
|                             continue | ||||
|                         value_son[k] = v | ||||
|                     # Required for MongoDB >= 2.6, may fail when combining | ||||
|                     # PyMongo 3+ and MongoDB < 2.6 | ||||
|                     near_embedded = False | ||||
|                     for near_op in ("$near", "$nearSphere"): | ||||
|                         if isinstance(value_dict.get(near_op), dict): | ||||
|                     for near_op in ('$near', '$nearSphere'): | ||||
|                         if isinstance(value_dict.get(near_op), dict) and ( | ||||
|                                 IS_PYMONGO_3 or get_connection().max_wire_version > 1): | ||||
|                             value_son[near_op] = SON(value_son[near_op]) | ||||
|                             if "$maxDistance" in value_dict: | ||||
|                                 value_son[near_op]["$maxDistance"] = value_dict[ | ||||
|                                     "$maxDistance" | ||||
|                                 ] | ||||
|                             if "$minDistance" in value_dict: | ||||
|                                 value_son[near_op]["$minDistance"] = value_dict[ | ||||
|                                     "$minDistance" | ||||
|                                 ] | ||||
|                             if '$maxDistance' in value_dict: | ||||
|                                 value_son[near_op][ | ||||
|                                     '$maxDistance'] = value_dict['$maxDistance'] | ||||
|                             if '$minDistance' in value_dict: | ||||
|                                 value_son[near_op][ | ||||
|                                     '$minDistance'] = value_dict['$minDistance'] | ||||
|                             near_embedded = True | ||||
|  | ||||
|                     if not near_embedded: | ||||
|                         if "$maxDistance" in value_dict: | ||||
|                             value_son["$maxDistance"] = value_dict["$maxDistance"] | ||||
|                         if "$minDistance" in value_dict: | ||||
|                             value_son["$minDistance"] = value_dict["$minDistance"] | ||||
|                         if '$maxDistance' in value_dict: | ||||
|                             value_son['$maxDistance'] = value_dict['$maxDistance'] | ||||
|                         if '$minDistance' in value_dict: | ||||
|                             value_son['$minDistance'] = value_dict['$minDistance'] | ||||
|                     mongo_query[key] = value_son | ||||
|             else: | ||||
|                 # Store for manually merging later | ||||
| @@ -216,10 +201,10 @@ def query(_doc_cls=None, **kwargs): | ||||
|         del mongo_query[k] | ||||
|         if isinstance(v, list): | ||||
|             value = [{k: val} for val in v] | ||||
|             if "$and" in mongo_query.keys(): | ||||
|                 mongo_query["$and"].extend(value) | ||||
|             if '$and' in mongo_query.keys(): | ||||
|                 mongo_query['$and'].extend(value) | ||||
|             else: | ||||
|                 mongo_query["$and"] = value | ||||
|                 mongo_query['$and'] = value | ||||
|  | ||||
|     return mongo_query | ||||
|  | ||||
| @@ -229,44 +214,37 @@ def update(_doc_cls=None, **update): | ||||
|     format. | ||||
|     """ | ||||
|     mongo_update = {} | ||||
|  | ||||
|     for key, value in update.items(): | ||||
|         if key == "__raw__": | ||||
|         if key == '__raw__': | ||||
|             mongo_update.update(value) | ||||
|             continue | ||||
|  | ||||
|         parts = key.split("__") | ||||
|  | ||||
|         parts = key.split('__') | ||||
|         # if there is no operator, default to 'set' | ||||
|         if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS: | ||||
|             parts.insert(0, "set") | ||||
|  | ||||
|             parts.insert(0, 'set') | ||||
|         # Check for an operator and transform to mongo-style if there is | ||||
|         op = None | ||||
|         if parts[0] in UPDATE_OPERATORS: | ||||
|             op = parts.pop(0) | ||||
|             # Convert Pythonic names to Mongo equivalents | ||||
|             operator_map = { | ||||
|                 "push_all": "pushAll", | ||||
|                 "pull_all": "pullAll", | ||||
|                 "dec": "inc", | ||||
|                 "add_to_set": "addToSet", | ||||
|                 "set_on_insert": "setOnInsert", | ||||
|             } | ||||
|             if op == "dec": | ||||
|             if op in ('push_all', 'pull_all'): | ||||
|                 op = op.replace('_all', 'All') | ||||
|             elif op == 'dec': | ||||
|                 # Support decrement by flipping a positive value's sign | ||||
|                 # and using 'inc' | ||||
|                 op = 'inc' | ||||
|                 value = -value | ||||
|             # If the operator doesn't found from operator map, the op value | ||||
|             # will stay unchanged | ||||
|             op = operator_map.get(op, op) | ||||
|             elif op == 'add_to_set': | ||||
|                 op = 'addToSet' | ||||
|             elif op == 'set_on_insert': | ||||
|                 op = 'setOnInsert' | ||||
|  | ||||
|         match = None | ||||
|         if parts[-1] in COMPARISON_OPERATORS: | ||||
|             match = parts.pop() | ||||
|  | ||||
|         # Allow to escape operator-like field name by __ | ||||
|         if len(parts) > 1 and parts[-1] == "": | ||||
|         if len(parts) > 1 and parts[-1] == '': | ||||
|             parts.pop() | ||||
|  | ||||
|         if _doc_cls: | ||||
| @@ -283,8 +261,8 @@ def update(_doc_cls=None, **update): | ||||
|                 append_field = True | ||||
|                 if isinstance(field, six.string_types): | ||||
|                     # Convert the S operator to $ | ||||
|                     if field == "S": | ||||
|                         field = "$" | ||||
|                     if field == 'S': | ||||
|                         field = '$' | ||||
|                     parts.append(field) | ||||
|                     append_field = False | ||||
|                 else: | ||||
| @@ -292,7 +270,7 @@ def update(_doc_cls=None, **update): | ||||
|                 if append_field: | ||||
|                     appended_sub_field = False | ||||
|                     cleaned_fields.append(field) | ||||
|                     if hasattr(field, "field"): | ||||
|                     if hasattr(field, 'field'): | ||||
|                         cleaned_fields.append(field.field) | ||||
|                         appended_sub_field = True | ||||
|  | ||||
| @@ -302,91 +280,63 @@ def update(_doc_cls=None, **update): | ||||
|             else: | ||||
|                 field = cleaned_fields[-1] | ||||
|  | ||||
|             GeoJsonBaseField = _import_class("GeoJsonBaseField") | ||||
|             GeoJsonBaseField = _import_class('GeoJsonBaseField') | ||||
|             if isinstance(field, GeoJsonBaseField): | ||||
|                 value = field.to_mongo(value) | ||||
|  | ||||
|             if op == "pull": | ||||
|                 if field.required or value is not None: | ||||
|                     if match in ("in", "nin") and not isinstance(value, dict): | ||||
|                         value = _prepare_query_for_iterable(field, op, value) | ||||
|                     else: | ||||
|                         value = field.prepare_query_value(op, value) | ||||
|             elif op == "push" and isinstance(value, (list, tuple, set)): | ||||
|                 value = [field.prepare_query_value(op, v) for v in value] | ||||
|             elif op in (None, "set", "push"): | ||||
|             if op in (None, 'set', 'push', 'pull'): | ||||
|                 if field.required or value is not None: | ||||
|                     value = field.prepare_query_value(op, value) | ||||
|             elif op in ("pushAll", "pullAll"): | ||||
|             elif op in ('pushAll', 'pullAll'): | ||||
|                 value = [field.prepare_query_value(op, v) for v in value] | ||||
|             elif op in ("addToSet", "setOnInsert"): | ||||
|             elif op in ('addToSet', 'setOnInsert'): | ||||
|                 if isinstance(value, (list, tuple, set)): | ||||
|                     value = [field.prepare_query_value(op, v) for v in value] | ||||
|                 elif field.required or value is not None: | ||||
|                     value = field.prepare_query_value(op, value) | ||||
|             elif op == "unset": | ||||
|             elif op == 'unset': | ||||
|                 value = 1 | ||||
|             elif op == "inc": | ||||
|                 value = field.prepare_query_value(op, value) | ||||
|  | ||||
|         if match: | ||||
|             match = "$" + match | ||||
|             match = '$' + match | ||||
|             value = {match: value} | ||||
|  | ||||
|         key = ".".join(parts) | ||||
|         key = '.'.join(parts) | ||||
|  | ||||
|         if "pull" in op and "." in key: | ||||
|         if not op: | ||||
|             raise InvalidQueryError('Updates must supply an operation ' | ||||
|                                     'eg: set__FIELD=value') | ||||
|  | ||||
|         if 'pull' in op and '.' in key: | ||||
|             # Dot operators don't work on pull operations | ||||
|             # unless they point to a list field | ||||
|             # Otherwise it uses nested dict syntax | ||||
|             if op == "pullAll": | ||||
|                 raise InvalidQueryError( | ||||
|                     "pullAll operations only support a single field depth" | ||||
|                 ) | ||||
|             if op == 'pullAll': | ||||
|                 raise InvalidQueryError('pullAll operations only support ' | ||||
|                                         'a single field depth') | ||||
|  | ||||
|             # Look for the last list field and use dot notation until there | ||||
|             field_classes = [c.__class__ for c in cleaned_fields] | ||||
|             field_classes.reverse() | ||||
|             ListField = _import_class("ListField") | ||||
|             EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField") | ||||
|             if ListField in field_classes or EmbeddedDocumentListField in field_classes: | ||||
|                 # Join all fields via dot notation to the last ListField or EmbeddedDocumentListField | ||||
|             ListField = _import_class('ListField') | ||||
|             if ListField in field_classes: | ||||
|                 # Join all fields via dot notation to the last ListField | ||||
|                 # Then process as normal | ||||
|                 if ListField in field_classes: | ||||
|                     _check_field = ListField | ||||
|                 else: | ||||
|                     _check_field = EmbeddedDocumentListField | ||||
|  | ||||
|                 last_listField = len(cleaned_fields) - field_classes.index(_check_field) | ||||
|                 key = ".".join(parts[:last_listField]) | ||||
|                 last_listField = len( | ||||
|                     cleaned_fields) - field_classes.index(ListField) | ||||
|                 key = '.'.join(parts[:last_listField]) | ||||
|                 parts = parts[last_listField:] | ||||
|                 parts.insert(0, key) | ||||
|  | ||||
|             parts.reverse() | ||||
|             for key in parts: | ||||
|                 value = {key: value} | ||||
|         elif op == "addToSet" and isinstance(value, list): | ||||
|             value = {key: {"$each": value}} | ||||
|         elif op in ("push", "pushAll"): | ||||
|             if parts[-1].isdigit(): | ||||
|                 key = ".".join(parts[0:-1]) | ||||
|                 position = int(parts[-1]) | ||||
|                 # $position expects an iterable. If pushing a single value, | ||||
|                 # wrap it in a list. | ||||
|                 if not isinstance(value, (set, tuple, list)): | ||||
|                     value = [value] | ||||
|                 value = {key: {"$each": value, "$position": position}} | ||||
|             else: | ||||
|                 if op == "pushAll": | ||||
|                     op = "push"  # convert to non-deprecated keyword | ||||
|                     if not isinstance(value, (set, tuple, list)): | ||||
|                         value = [value] | ||||
|                     value = {key: {"$each": value}} | ||||
|                 else: | ||||
|                     value = {key: value} | ||||
|         elif op == 'addToSet' and isinstance(value, list): | ||||
|             value = {key: {'$each': value}} | ||||
|         else: | ||||
|             value = {key: value} | ||||
|         key = "$" + op | ||||
|         key = '$' + op | ||||
|  | ||||
|         if key not in mongo_update: | ||||
|             mongo_update[key] = value | ||||
|         elif key in mongo_update and isinstance(mongo_update[key], dict): | ||||
| @@ -397,45 +347,45 @@ def update(_doc_cls=None, **update): | ||||
|  | ||||
| def _geo_operator(field, op, value): | ||||
|     """Helper to return the query for a given geo query.""" | ||||
|     if op == "max_distance": | ||||
|         value = {"$maxDistance": value} | ||||
|     elif op == "min_distance": | ||||
|         value = {"$minDistance": value} | ||||
|     if op == 'max_distance': | ||||
|         value = {'$maxDistance': value} | ||||
|     elif op == 'min_distance': | ||||
|         value = {'$minDistance': value} | ||||
|     elif field._geo_index == pymongo.GEO2D: | ||||
|         if op == "within_distance": | ||||
|             value = {"$within": {"$center": value}} | ||||
|         elif op == "within_spherical_distance": | ||||
|             value = {"$within": {"$centerSphere": value}} | ||||
|         elif op == "within_polygon": | ||||
|             value = {"$within": {"$polygon": value}} | ||||
|         elif op == "near": | ||||
|             value = {"$near": value} | ||||
|         elif op == "near_sphere": | ||||
|             value = {"$nearSphere": value} | ||||
|         elif op == "within_box": | ||||
|             value = {"$within": {"$box": value}} | ||||
|         if op == 'within_distance': | ||||
|             value = {'$within': {'$center': value}} | ||||
|         elif op == 'within_spherical_distance': | ||||
|             value = {'$within': {'$centerSphere': value}} | ||||
|         elif op == 'within_polygon': | ||||
|             value = {'$within': {'$polygon': value}} | ||||
|         elif op == 'near': | ||||
|             value = {'$near': value} | ||||
|         elif op == 'near_sphere': | ||||
|             value = {'$nearSphere': value} | ||||
|         elif op == 'within_box': | ||||
|             value = {'$within': {'$box': value}} | ||||
|         else: | ||||
|             raise NotImplementedError( | ||||
|                 'Geo method "%s" has not been ' "implemented for a GeoPointField" % op | ||||
|             ) | ||||
|             raise NotImplementedError('Geo method "%s" has not been ' | ||||
|                                       'implemented for a GeoPointField' % op) | ||||
|     else: | ||||
|         if op == "geo_within": | ||||
|             value = {"$geoWithin": _infer_geometry(value)} | ||||
|         elif op == "geo_within_box": | ||||
|             value = {"$geoWithin": {"$box": value}} | ||||
|         elif op == "geo_within_polygon": | ||||
|             value = {"$geoWithin": {"$polygon": value}} | ||||
|         elif op == "geo_within_center": | ||||
|             value = {"$geoWithin": {"$center": value}} | ||||
|         elif op == "geo_within_sphere": | ||||
|             value = {"$geoWithin": {"$centerSphere": value}} | ||||
|         elif op == "geo_intersects": | ||||
|             value = {"$geoIntersects": _infer_geometry(value)} | ||||
|         elif op == "near": | ||||
|             value = {"$near": _infer_geometry(value)} | ||||
|         if op == 'geo_within': | ||||
|             value = {'$geoWithin': _infer_geometry(value)} | ||||
|         elif op == 'geo_within_box': | ||||
|             value = {'$geoWithin': {'$box': value}} | ||||
|         elif op == 'geo_within_polygon': | ||||
|             value = {'$geoWithin': {'$polygon': value}} | ||||
|         elif op == 'geo_within_center': | ||||
|             value = {'$geoWithin': {'$center': value}} | ||||
|         elif op == 'geo_within_sphere': | ||||
|             value = {'$geoWithin': {'$centerSphere': value}} | ||||
|         elif op == 'geo_intersects': | ||||
|             value = {'$geoIntersects': _infer_geometry(value)} | ||||
|         elif op == 'near': | ||||
|             value = {'$near': _infer_geometry(value)} | ||||
|         else: | ||||
|             raise NotImplementedError( | ||||
|                 'Geo method "%s" has not been implemented for a %s ' % (op, field._name) | ||||
|                 'Geo method "%s" has not been implemented for a %s ' | ||||
|                 % (op, field._name) | ||||
|             ) | ||||
|     return value | ||||
|  | ||||
| @@ -445,58 +395,33 @@ def _infer_geometry(value): | ||||
|     given value. | ||||
|     """ | ||||
|     if isinstance(value, dict): | ||||
|         if "$geometry" in value: | ||||
|         if '$geometry' in value: | ||||
|             return value | ||||
|         elif "coordinates" in value and "type" in value: | ||||
|             return {"$geometry": value} | ||||
|         raise InvalidQueryError( | ||||
|             "Invalid $geometry dictionary should have type and coordinates keys" | ||||
|         ) | ||||
|         elif 'coordinates' in value and 'type' in value: | ||||
|             return {'$geometry': value} | ||||
|         raise InvalidQueryError('Invalid $geometry dictionary should have ' | ||||
|                                 'type and coordinates keys') | ||||
|     elif isinstance(value, (list, set)): | ||||
|         # TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon? | ||||
|         # TODO: should both TypeError and IndexError be alike interpreted? | ||||
|  | ||||
|         try: | ||||
|             value[0][0][0] | ||||
|             return {"$geometry": {"type": "Polygon", "coordinates": value}} | ||||
|             return {'$geometry': {'type': 'Polygon', 'coordinates': value}} | ||||
|         except (TypeError, IndexError): | ||||
|             pass | ||||
|  | ||||
|         try: | ||||
|             value[0][0] | ||||
|             return {"$geometry": {"type": "LineString", "coordinates": value}} | ||||
|             return {'$geometry': {'type': 'LineString', 'coordinates': value}} | ||||
|         except (TypeError, IndexError): | ||||
|             pass | ||||
|  | ||||
|         try: | ||||
|             value[0] | ||||
|             return {"$geometry": {"type": "Point", "coordinates": value}} | ||||
|             return {'$geometry': {'type': 'Point', 'coordinates': value}} | ||||
|         except (TypeError, IndexError): | ||||
|             pass | ||||
|  | ||||
|     raise InvalidQueryError( | ||||
|         "Invalid $geometry data. Can be either a " | ||||
|         "dictionary or (nested) lists of coordinate(s)" | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def _prepare_query_for_iterable(field, op, value): | ||||
|     # We need a special check for BaseDocument, because - although it's iterable - using | ||||
|     # it as such in the context of this method is most definitely a mistake. | ||||
|     BaseDocument = _import_class("BaseDocument") | ||||
|  | ||||
|     if isinstance(value, BaseDocument): | ||||
|         raise TypeError( | ||||
|             "When using the `in`, `nin`, `all`, or " | ||||
|             "`near`-operators you can't use a " | ||||
|             "`Document`, you must wrap your object " | ||||
|             "in a list (object -> [object])." | ||||
|         ) | ||||
|  | ||||
|     if not hasattr(value, "__iter__"): | ||||
|         raise TypeError( | ||||
|             "The `in`, `nin`, `all`, or " | ||||
|             "`near`-operators must be applied to an " | ||||
|             "iterable (e.g. a list)." | ||||
|         ) | ||||
|  | ||||
|     return [field.prepare_query_value(op, v) for v in value] | ||||
|     raise InvalidQueryError('Invalid $geometry data. Can be either a ' | ||||
|                             'dictionary or (nested) lists of coordinate(s)') | ||||
|   | ||||
| @@ -1,10 +1,9 @@ | ||||
| import copy | ||||
| import warnings | ||||
|  | ||||
| from mongoengine.errors import InvalidQueryError | ||||
| from mongoengine.queryset import transform | ||||
|  | ||||
| __all__ = ("Q", "QNode") | ||||
| __all__ = ('Q',) | ||||
|  | ||||
|  | ||||
| class QNodeVisitor(object): | ||||
| @@ -70,9 +69,9 @@ class QueryCompilerVisitor(QNodeVisitor): | ||||
|         self.document = document | ||||
|  | ||||
|     def visit_combination(self, combination): | ||||
|         operator = "$and" | ||||
|         operator = '$and' | ||||
|         if combination.operation == combination.OR: | ||||
|             operator = "$or" | ||||
|             operator = '$or' | ||||
|         return {operator: combination.children} | ||||
|  | ||||
|     def visit_query(self, query): | ||||
| @@ -97,11 +96,9 @@ class QNode(object): | ||||
|         """Combine this node with another node into a QCombination | ||||
|         object. | ||||
|         """ | ||||
|         # If the other Q() is empty, ignore it and just use `self`. | ||||
|         if getattr(other, "empty", True): | ||||
|         if getattr(other, 'empty', True): | ||||
|             return self | ||||
|  | ||||
|         # Or if this Q is empty, ignore it and just use `other`. | ||||
|         if self.empty: | ||||
|             return other | ||||
|  | ||||
| @@ -109,8 +106,6 @@ class QNode(object): | ||||
|  | ||||
|     @property | ||||
|     def empty(self): | ||||
|         msg = "'empty' property is deprecated in favour of using 'not bool(filter)'" | ||||
|         warnings.warn(msg, DeprecationWarning) | ||||
|         return False | ||||
|  | ||||
|     def __or__(self, other): | ||||
| @@ -136,15 +131,6 @@ class QCombination(QNode): | ||||
|             else: | ||||
|                 self.children.append(node) | ||||
|  | ||||
|     def __repr__(self): | ||||
|         op = " & " if self.operation is self.AND else " | " | ||||
|         return "(%s)" % op.join([repr(node) for node in self.children]) | ||||
|  | ||||
|     def __bool__(self): | ||||
|         return bool(self.children) | ||||
|  | ||||
|     __nonzero__ = __bool__  # For Py2 support | ||||
|  | ||||
|     def accept(self, visitor): | ||||
|         for i in range(len(self.children)): | ||||
|             if isinstance(self.children[i], QNode): | ||||
| @@ -154,17 +140,8 @@ class QCombination(QNode): | ||||
|  | ||||
|     @property | ||||
|     def empty(self): | ||||
|         msg = "'empty' property is deprecated in favour of using 'not bool(filter)'" | ||||
|         warnings.warn(msg, DeprecationWarning) | ||||
|         return not bool(self.children) | ||||
|  | ||||
|     def __eq__(self, other): | ||||
|         return ( | ||||
|             self.__class__ == other.__class__ | ||||
|             and self.operation == other.operation | ||||
|             and self.children == other.children | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class Q(QNode): | ||||
|     """A simple query object, used in a query tree to build up more complex | ||||
| @@ -174,17 +151,6 @@ class Q(QNode): | ||||
|     def __init__(self, **query): | ||||
|         self.query = query | ||||
|  | ||||
|     def __repr__(self): | ||||
|         return "Q(**%s)" % repr(self.query) | ||||
|  | ||||
|     def __bool__(self): | ||||
|         return bool(self.query) | ||||
|  | ||||
|     __nonzero__ = __bool__  # For Py2 support | ||||
|  | ||||
|     def __eq__(self, other): | ||||
|         return self.__class__ == other.__class__ and self.query == other.query | ||||
|  | ||||
|     def accept(self, visitor): | ||||
|         return visitor.visit_query(self) | ||||
|  | ||||
|   | ||||
| @@ -1,12 +1,5 @@ | ||||
| __all__ = ( | ||||
|     "pre_init", | ||||
|     "post_init", | ||||
|     "pre_save", | ||||
|     "pre_save_post_validation", | ||||
|     "post_save", | ||||
|     "pre_delete", | ||||
|     "post_delete", | ||||
| ) | ||||
| __all__ = ('pre_init', 'post_init', 'pre_save', 'pre_save_post_validation', | ||||
|            'post_save', 'pre_delete', 'post_delete') | ||||
|  | ||||
| signals_available = False | ||||
| try: | ||||
| @@ -14,7 +7,6 @@ try: | ||||
|  | ||||
|     signals_available = True | ||||
| except ImportError: | ||||
|  | ||||
|     class Namespace(object): | ||||
|         def signal(self, name, doc=None): | ||||
|             return _FakeSignal(name, doc) | ||||
| @@ -31,16 +23,13 @@ except ImportError: | ||||
|             self.__doc__ = doc | ||||
|  | ||||
|         def _fail(self, *args, **kwargs): | ||||
|             raise RuntimeError( | ||||
|                 "signalling support is unavailable " | ||||
|                 "because the blinker library is " | ||||
|                 "not installed." | ||||
|             ) | ||||
|             raise RuntimeError('signalling support is unavailable ' | ||||
|                                'because the blinker library is ' | ||||
|                                'not installed.') | ||||
|  | ||||
|         send = lambda *a, **kw: None  # noqa | ||||
|         connect = ( | ||||
|             disconnect | ||||
|         ) = has_receivers_for = receivers_for = temporarily_connected_to = _fail | ||||
|         connect = disconnect = has_receivers_for = receivers_for = \ | ||||
|             temporarily_connected_to = _fail | ||||
|         del _fail | ||||
|  | ||||
|  | ||||
| @@ -48,12 +37,12 @@ except ImportError: | ||||
| # not put signals in here.  Create your own namespace instead. | ||||
| _signals = Namespace() | ||||
|  | ||||
| pre_init = _signals.signal("pre_init") | ||||
| post_init = _signals.signal("post_init") | ||||
| pre_save = _signals.signal("pre_save") | ||||
| pre_save_post_validation = _signals.signal("pre_save_post_validation") | ||||
| post_save = _signals.signal("post_save") | ||||
| pre_delete = _signals.signal("pre_delete") | ||||
| post_delete = _signals.signal("post_delete") | ||||
| pre_bulk_insert = _signals.signal("pre_bulk_insert") | ||||
| post_bulk_insert = _signals.signal("post_bulk_insert") | ||||
| pre_init = _signals.signal('pre_init') | ||||
| post_init = _signals.signal('post_init') | ||||
| pre_save = _signals.signal('pre_save') | ||||
| pre_save_post_validation = _signals.signal('pre_save_post_validation') | ||||
| post_save = _signals.signal('post_save') | ||||
| pre_delete = _signals.signal('pre_delete') | ||||
| post_delete = _signals.signal('post_delete') | ||||
| pre_bulk_insert = _signals.signal('pre_bulk_insert') | ||||
| post_bulk_insert = _signals.signal('post_bulk_insert') | ||||
|   | ||||
| @@ -1,3 +0,0 @@ | ||||
| black | ||||
| flake8 | ||||
| flake8-import-order | ||||
| @@ -1,4 +1,7 @@ | ||||
| pymongo>=3.4 | ||||
| nose | ||||
| pymongo>=2.7.1 | ||||
| six==1.10.0 | ||||
| flake8 | ||||
| flake8-import-order | ||||
| Sphinx==1.5.5 | ||||
| sphinx-rtd-theme==0.2.4 | ||||
|   | ||||
							
								
								
									
										13
									
								
								setup.cfg
									
									
									
									
									
								
							
							
						
						
									
										13
									
								
								setup.cfg
									
									
									
									
									
								
							| @@ -1,10 +1,11 @@ | ||||
| [nosetests] | ||||
| verbosity=2 | ||||
| detailed-errors=1 | ||||
| tests=tests | ||||
| cover-package=mongoengine | ||||
|  | ||||
| [flake8] | ||||
| ignore=E501,F401,F403,F405,I201,I202,W504, W605, W503 | ||||
| ignore=E501,F401,F403,F405,I201 | ||||
| exclude=build,dist,docs,venv,venv3,.tox,.eggs,tests | ||||
| max-complexity=47 | ||||
| application-import-names=mongoengine,tests | ||||
|  | ||||
| [tool:pytest] | ||||
| # Limits the discovery to tests directory | ||||
| # avoids that it runs for instance the benchmark | ||||
| testpaths = tests | ||||
|   | ||||
							
								
								
									
										136
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										136
									
								
								setup.py
									
									
									
									
									
								
							| @@ -1,9 +1,6 @@ | ||||
| import os | ||||
| import sys | ||||
|  | ||||
| from pkg_resources import normalize_path | ||||
| from setuptools import find_packages, setup | ||||
| from setuptools.command.test import test as TestCommand | ||||
|  | ||||
| # Hack to silence atexit traceback in newer python versions | ||||
| try: | ||||
| @@ -11,10 +8,13 @@ try: | ||||
| except ImportError: | ||||
|     pass | ||||
|  | ||||
| DESCRIPTION = "MongoEngine is a Python Object-Document Mapper for working with MongoDB." | ||||
| DESCRIPTION = ( | ||||
|     'MongoEngine is a Python Object-Document ' | ||||
|     'Mapper for working with MongoDB.' | ||||
| ) | ||||
|  | ||||
| try: | ||||
|     with open("README.rst") as fin: | ||||
|     with open('README.rst') as fin: | ||||
|         LONG_DESCRIPTION = fin.read() | ||||
| except Exception: | ||||
|     LONG_DESCRIPTION = None | ||||
| @@ -24,126 +24,64 @@ def get_version(version_tuple): | ||||
|     """Return the version tuple as a string, e.g. for (0, 10, 7), | ||||
|     return '0.10.7'. | ||||
|     """ | ||||
|     return ".".join(map(str, version_tuple)) | ||||
|  | ||||
|  | ||||
| class PyTest(TestCommand): | ||||
|     """Will force pytest to search for tests inside the build directory | ||||
|     for 2to3 converted code (used by tox), instead of the current directory. | ||||
|     Required as long as we need 2to3 | ||||
|  | ||||
|     Known Limitation: https://tox.readthedocs.io/en/latest/example/pytest.html#known-issues-and-limitations | ||||
|     Source: https://www.hackzine.org/python-testing-with-pytest-and-2to3-plus-tox-and-travis-ci.html | ||||
|     """ | ||||
|  | ||||
|     # https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands | ||||
|     # Allows to provide pytest command argument through the test runner command `python setup.py test` | ||||
|     # e.g: `python setup.py test -a "-k=test"` | ||||
|     # This only works for 1 argument though | ||||
|     user_options = [("pytest-args=", "a", "Arguments to pass to py.test")] | ||||
|  | ||||
|     def initialize_options(self): | ||||
|         TestCommand.initialize_options(self) | ||||
|         self.pytest_args = "" | ||||
|  | ||||
|     def finalize_options(self): | ||||
|         TestCommand.finalize_options(self) | ||||
|         self.test_args = ["tests"] | ||||
|         self.test_suite = True | ||||
|  | ||||
|     def run_tests(self): | ||||
|         # import here, cause outside the eggs aren't loaded | ||||
|         from pkg_resources import _namespace_packages | ||||
|         import pytest | ||||
|  | ||||
|         # Purge modules under test from sys.modules. The test loader will | ||||
|         # re-import them from the build location. Required when 2to3 is used | ||||
|         # with namespace packages. | ||||
|         if sys.version_info >= (3,) and getattr(self.distribution, "use_2to3", False): | ||||
|             module = self.test_args[-1].split(".")[0] | ||||
|             if module in _namespace_packages: | ||||
|                 del_modules = [] | ||||
|                 if module in sys.modules: | ||||
|                     del_modules.append(module) | ||||
|                 module += "." | ||||
|                 for name in sys.modules: | ||||
|                     if name.startswith(module): | ||||
|                         del_modules.append(name) | ||||
|                 map(sys.modules.__delitem__, del_modules) | ||||
|  | ||||
|             # Run on the build directory for 2to3-built code | ||||
|             # This will prevent the old 2.x code from being found | ||||
|             # by py.test discovery mechanism, that apparently | ||||
|             # ignores sys.path.. | ||||
|             ei_cmd = self.get_finalized_command("egg_info") | ||||
|             self.test_args = [normalize_path(ei_cmd.egg_base)] | ||||
|  | ||||
|         cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else []) | ||||
|         errno = pytest.main(cmd_args) | ||||
|         sys.exit(errno) | ||||
|     return '.'.join(map(str, version_tuple)) | ||||
|  | ||||
|  | ||||
| # Dirty hack to get version number from monogengine/__init__.py - we can't | ||||
| # import it as it depends on PyMongo and PyMongo isn't installed until this | ||||
| # file is read | ||||
| init = os.path.join(os.path.dirname(__file__), "mongoengine", "__init__.py") | ||||
| version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0] | ||||
| init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') | ||||
| version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0] | ||||
|  | ||||
| VERSION = get_version(eval(version_line.split("=")[-1])) | ||||
| VERSION = get_version(eval(version_line.split('=')[-1])) | ||||
|  | ||||
| CLASSIFIERS = [ | ||||
|     "Development Status :: 4 - Beta", | ||||
|     "Intended Audience :: Developers", | ||||
|     "License :: OSI Approved :: MIT License", | ||||
|     "Operating System :: OS Independent", | ||||
|     "Programming Language :: Python", | ||||
|     'Development Status :: 4 - Beta', | ||||
|     'Intended Audience :: Developers', | ||||
|     'License :: OSI Approved :: MIT License', | ||||
|     'Operating System :: OS Independent', | ||||
|     'Programming Language :: Python', | ||||
|     "Programming Language :: Python :: 2", | ||||
|     "Programming Language :: Python :: 2.7", | ||||
|     "Programming Language :: Python :: 3", | ||||
|     "Programming Language :: Python :: 3.3", | ||||
|     "Programming Language :: Python :: 3.4", | ||||
|     "Programming Language :: Python :: 3.5", | ||||
|     "Programming Language :: Python :: 3.6", | ||||
|     "Programming Language :: Python :: Implementation :: CPython", | ||||
|     "Programming Language :: Python :: Implementation :: PyPy", | ||||
|     "Topic :: Database", | ||||
|     "Topic :: Software Development :: Libraries :: Python Modules", | ||||
|     'Topic :: Database', | ||||
|     'Topic :: Software Development :: Libraries :: Python Modules', | ||||
| ] | ||||
|  | ||||
| extra_opts = { | ||||
|     "packages": find_packages(exclude=["tests", "tests.*"]), | ||||
|     "tests_require": [ | ||||
|         "pytest<5.0", | ||||
|         "pytest-cov", | ||||
|         "coverage<5.0",  # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls | ||||
|         "blinker", | ||||
|         "Pillow>=2.0.0, <7.0.0",  # 7.0.0 dropped Python2 support | ||||
|     ], | ||||
|     'packages': find_packages(exclude=['tests', 'tests.*']), | ||||
|     'tests_require': ['nose', 'coverage==4.2', 'blinker', 'Pillow>=2.0.0'] | ||||
| } | ||||
| if sys.version_info[0] == 3: | ||||
|     extra_opts["use_2to3"] = True | ||||
|     if "test" in sys.argv: | ||||
|         extra_opts["packages"] = find_packages() | ||||
|         extra_opts["package_data"] = { | ||||
|             "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"] | ||||
|         } | ||||
|     extra_opts['use_2to3'] = True | ||||
|     if 'test' in sys.argv or 'nosetests' in sys.argv: | ||||
|         extra_opts['packages'] = find_packages() | ||||
|         extra_opts['package_data'] = { | ||||
|             'tests': ['fields/mongoengine.png', 'fields/mongodb_leaf.png']} | ||||
| else: | ||||
|     extra_opts["tests_require"] += ["python-dateutil"] | ||||
|     extra_opts['tests_require'] += ['python-dateutil'] | ||||
|  | ||||
| setup( | ||||
|     name="mongoengine", | ||||
|     name='mongoengine', | ||||
|     version=VERSION, | ||||
|     author="Harry Marr", | ||||
|     author_email="harry.marr@gmail.com", | ||||
|     maintainer="Stefan Wojcik", | ||||
|     maintainer_email="wojcikstefan@gmail.com", | ||||
|     url="http://mongoengine.org/", | ||||
|     download_url="https://github.com/MongoEngine/mongoengine/tarball/master", | ||||
|     license="MIT", | ||||
|     author='Harry Marr', | ||||
|     author_email='harry.marr@{nospam}gmail.com', | ||||
|     maintainer="Ross Lawley", | ||||
|     maintainer_email="ross.lawley@{nospam}gmail.com", | ||||
|     url='http://mongoengine.org/', | ||||
|     download_url='https://github.com/MongoEngine/mongoengine/tarball/master', | ||||
|     license='MIT', | ||||
|     include_package_data=True, | ||||
|     description=DESCRIPTION, | ||||
|     long_description=LONG_DESCRIPTION, | ||||
|     platforms=["any"], | ||||
|     platforms=['any'], | ||||
|     classifiers=CLASSIFIERS, | ||||
|     install_requires=["pymongo>=3.4", "six>=1.10.0"], | ||||
|     cmdclass={"test": PyTest}, | ||||
|     install_requires=['pymongo>=2.7.1', 'six'], | ||||
|     test_suite='nose.collector', | ||||
|     **extra_opts | ||||
| ) | ||||
|   | ||||
| @@ -0,0 +1,4 @@ | ||||
| from all_warnings import AllWarnings | ||||
| from document import * | ||||
| from queryset import * | ||||
| from fields import * | ||||
|   | ||||
| @@ -0,0 +1,42 @@ | ||||
| """ | ||||
| This test has been put into a module.  This is because it tests warnings that | ||||
| only get triggered on first hit.  This way we can ensure its imported into the | ||||
| top level and called first by the test suite. | ||||
| """ | ||||
| import unittest | ||||
| import warnings | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
|  | ||||
| __all__ = ('AllWarnings', ) | ||||
|  | ||||
|  | ||||
| class AllWarnings(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.warning_list = [] | ||||
|         self.showwarning_default = warnings.showwarning | ||||
|         warnings.showwarning = self.append_to_warning_list | ||||
|  | ||||
|     def append_to_warning_list(self, message, category, *args): | ||||
|         self.warning_list.append({"message": message, | ||||
|                                   "category": category}) | ||||
|  | ||||
|     def tearDown(self): | ||||
|         # restore default handling of warnings | ||||
|         warnings.showwarning = self.showwarning_default | ||||
|  | ||||
|     def test_document_collection_syntax_warning(self): | ||||
|  | ||||
|         class NonAbstractBase(Document): | ||||
|             meta = {'allow_inheritance': True} | ||||
|  | ||||
|         class InheritedDocumentFailTest(NonAbstractBase): | ||||
|             meta = {'collection': 'fail'} | ||||
|  | ||||
|         warning = self.warning_list[0] | ||||
|         self.assertEqual(SyntaxWarning, warning["category"]) | ||||
|         self.assertEqual('non_abstract_base', | ||||
|                          InheritedDocumentFailTest._get_collection_name()) | ||||
|   | ||||
| @@ -1,35 +0,0 @@ | ||||
| """ | ||||
| This test has been put into a module.  This is because it tests warnings that | ||||
| only get triggered on first hit.  This way we can ensure its imported into the | ||||
| top level and called first by the test suite. | ||||
| """ | ||||
| import unittest | ||||
| import warnings | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
|  | ||||
| class TestAllWarnings(unittest.TestCase): | ||||
|     def setUp(self): | ||||
|         connect(db="mongoenginetest") | ||||
|         self.warning_list = [] | ||||
|         self.showwarning_default = warnings.showwarning | ||||
|         warnings.showwarning = self.append_to_warning_list | ||||
|  | ||||
|     def append_to_warning_list(self, message, category, *args): | ||||
|         self.warning_list.append({"message": message, "category": category}) | ||||
|  | ||||
|     def tearDown(self): | ||||
|         # restore default handling of warnings | ||||
|         warnings.showwarning = self.showwarning_default | ||||
|  | ||||
|     def test_document_collection_syntax_warning(self): | ||||
|         class NonAbstractBase(Document): | ||||
|             meta = {"allow_inheritance": True} | ||||
|  | ||||
|         class InheritedDocumentFailTest(NonAbstractBase): | ||||
|             meta = {"collection": "fail"} | ||||
|  | ||||
|         warning = self.warning_list[0] | ||||
|         assert SyntaxWarning == warning["category"] | ||||
|         assert "non_abstract_base" == InheritedDocumentFailTest._get_collection_name() | ||||
| @@ -0,0 +1,13 @@ | ||||
| import unittest | ||||
|  | ||||
| from class_methods import * | ||||
| from delta import * | ||||
| from dynamic import * | ||||
| from indexes import * | ||||
| from inheritance import * | ||||
| from instance import * | ||||
| from json_serialisation import * | ||||
| from validation import * | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
|   | ||||
							
								
								
									
										350
									
								
								tests/document/class_methods.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										350
									
								
								tests/document/class_methods.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,350 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
| from mongoengine.queryset import NULLIFY, PULL | ||||
| from mongoengine.connection import get_db | ||||
|  | ||||
| __all__ = ("ClassMethodsTest", ) | ||||
|  | ||||
|  | ||||
| class ClassMethodsTest(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             age = IntField() | ||||
|  | ||||
|             non_field = True | ||||
|  | ||||
|             meta = {"allow_inheritance": True} | ||||
|  | ||||
|         self.Person = Person | ||||
|  | ||||
|     def tearDown(self): | ||||
|         for collection in self.db.collection_names(): | ||||
|             if 'system.' in collection: | ||||
|                 continue | ||||
|             self.db.drop_collection(collection) | ||||
|  | ||||
|     def test_definition(self): | ||||
|         """Ensure that document may be defined using fields. | ||||
|         """ | ||||
|         self.assertEqual(['_cls', 'age', 'id', 'name'], | ||||
|                          sorted(self.Person._fields.keys())) | ||||
|         self.assertEqual(["IntField", "ObjectIdField", "StringField", "StringField"], | ||||
|                         sorted([x.__class__.__name__ for x in | ||||
|                                 self.Person._fields.values()])) | ||||
|  | ||||
|     def test_get_db(self): | ||||
|         """Ensure that get_db returns the expected db. | ||||
|         """ | ||||
|         db = self.Person._get_db() | ||||
|         self.assertEqual(self.db, db) | ||||
|  | ||||
|     def test_get_collection_name(self): | ||||
|         """Ensure that get_collection_name returns the expected collection | ||||
|         name. | ||||
|         """ | ||||
|         collection_name = 'person' | ||||
|         self.assertEqual(collection_name, self.Person._get_collection_name()) | ||||
|  | ||||
|     def test_get_collection(self): | ||||
|         """Ensure that get_collection returns the expected collection. | ||||
|         """ | ||||
|         collection_name = 'person' | ||||
|         collection = self.Person._get_collection() | ||||
|         self.assertEqual(self.db[collection_name], collection) | ||||
|  | ||||
|     def test_drop_collection(self): | ||||
|         """Ensure that the collection may be dropped from the database. | ||||
|         """ | ||||
|         collection_name = 'person' | ||||
|         self.Person(name='Test').save() | ||||
|         self.assertTrue(collection_name in self.db.collection_names()) | ||||
|  | ||||
|         self.Person.drop_collection() | ||||
|         self.assertFalse(collection_name in self.db.collection_names()) | ||||
|  | ||||
|     def test_register_delete_rule(self): | ||||
|         """Ensure that register delete rule adds a delete rule to the document | ||||
|         meta. | ||||
|         """ | ||||
|         class Job(Document): | ||||
|             employee = ReferenceField(self.Person) | ||||
|  | ||||
|         self.assertEqual(self.Person._meta.get('delete_rules'), None) | ||||
|  | ||||
|         self.Person.register_delete_rule(Job, 'employee', NULLIFY) | ||||
|         self.assertEqual(self.Person._meta['delete_rules'], | ||||
|                          {(Job, 'employee'): NULLIFY}) | ||||
|  | ||||
|     def test_compare_indexes(self): | ||||
|         """ Ensure that the indexes are properly created and that | ||||
|         compare_indexes identifies the missing/extra indexes | ||||
|         """ | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             author = StringField() | ||||
|             title = StringField() | ||||
|             description = StringField() | ||||
|             tags = StringField() | ||||
|  | ||||
|             meta = { | ||||
|                 'indexes': [('author', 'title')] | ||||
|             } | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         BlogPost.ensure_indexes() | ||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) | ||||
|  | ||||
|         BlogPost.ensure_index(['author', 'description']) | ||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('author', 1), ('description', 1)]] }) | ||||
|  | ||||
|         BlogPost._get_collection().drop_index('author_1_description_1') | ||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) | ||||
|  | ||||
|         BlogPost._get_collection().drop_index('author_1_title_1') | ||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('author', 1), ('title', 1)]], 'extra': [] }) | ||||
|  | ||||
|     def test_compare_indexes_inheritance(self): | ||||
|         """ Ensure that the indexes are properly created and that | ||||
|         compare_indexes identifies the missing/extra indexes for subclassed | ||||
|         documents (_cls included) | ||||
|         """ | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             author = StringField() | ||||
|             title = StringField() | ||||
|             description = StringField() | ||||
|  | ||||
|             meta = { | ||||
|                 'allow_inheritance': True | ||||
|             } | ||||
|  | ||||
|         class BlogPostWithTags(BlogPost): | ||||
|             tags = StringField() | ||||
|             tag_list = ListField(StringField()) | ||||
|  | ||||
|             meta = { | ||||
|                 'indexes': [('author', 'tags')] | ||||
|             } | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         BlogPost.ensure_indexes() | ||||
|         BlogPostWithTags.ensure_indexes() | ||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) | ||||
|  | ||||
|         BlogPostWithTags.ensure_index(['author', 'tag_list']) | ||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [[('_cls', 1), ('author', 1), ('tag_list', 1)]] }) | ||||
|  | ||||
|         BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tag_list_1') | ||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) | ||||
|  | ||||
|         BlogPostWithTags._get_collection().drop_index('_cls_1_author_1_tags_1') | ||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [[('_cls', 1), ('author', 1), ('tags', 1)]], 'extra': [] }) | ||||
|  | ||||
|     def test_compare_indexes_multiple_subclasses(self): | ||||
|         """ Ensure that compare_indexes behaves correctly if called from a | ||||
|         class, which base class has multiple subclasses | ||||
|         """ | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             author = StringField() | ||||
|             title = StringField() | ||||
|             description = StringField() | ||||
|  | ||||
|             meta = { | ||||
|                 'allow_inheritance': True | ||||
|             } | ||||
|  | ||||
|         class BlogPostWithTags(BlogPost): | ||||
|             tags = StringField() | ||||
|             tag_list = ListField(StringField()) | ||||
|  | ||||
|             meta = { | ||||
|                 'indexes': [('author', 'tags')] | ||||
|             } | ||||
|  | ||||
|         class BlogPostWithCustomField(BlogPost): | ||||
|             custom = DictField() | ||||
|  | ||||
|             meta = { | ||||
|                 'indexes': [('author', 'custom')] | ||||
|             } | ||||
|  | ||||
|         BlogPost.ensure_indexes() | ||||
|         BlogPostWithTags.ensure_indexes() | ||||
|         BlogPostWithCustomField.ensure_indexes() | ||||
|  | ||||
|         self.assertEqual(BlogPost.compare_indexes(), { 'missing': [], 'extra': [] }) | ||||
|         self.assertEqual(BlogPostWithTags.compare_indexes(), { 'missing': [], 'extra': [] }) | ||||
|         self.assertEqual(BlogPostWithCustomField.compare_indexes(), { 'missing': [], 'extra': [] }) | ||||
|  | ||||
|     def test_list_indexes_inheritance(self): | ||||
|         """ ensure that all of the indexes are listed regardless of the super- | ||||
|         or sub-class that we call it from | ||||
|         """ | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             author = StringField() | ||||
|             title = StringField() | ||||
|             description = StringField() | ||||
|  | ||||
|             meta = { | ||||
|                 'allow_inheritance': True | ||||
|             } | ||||
|  | ||||
|         class BlogPostWithTags(BlogPost): | ||||
|             tags = StringField() | ||||
|  | ||||
|             meta = { | ||||
|                 'indexes': [('author', 'tags')] | ||||
|             } | ||||
|  | ||||
|         class BlogPostWithTagsAndExtraText(BlogPostWithTags): | ||||
|             extra_text = StringField() | ||||
|  | ||||
|             meta = { | ||||
|                 'indexes': [('author', 'tags', 'extra_text')] | ||||
|             } | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         BlogPost.ensure_indexes() | ||||
|         BlogPostWithTags.ensure_indexes() | ||||
|         BlogPostWithTagsAndExtraText.ensure_indexes() | ||||
|  | ||||
|         self.assertEqual(BlogPost.list_indexes(), | ||||
|                          BlogPostWithTags.list_indexes()) | ||||
|         self.assertEqual(BlogPost.list_indexes(), | ||||
|                          BlogPostWithTagsAndExtraText.list_indexes()) | ||||
|         self.assertEqual(BlogPost.list_indexes(), | ||||
|                          [[('_cls', 1), ('author', 1), ('tags', 1)], | ||||
|                          [('_cls', 1), ('author', 1), ('tags', 1), ('extra_text', 1)], | ||||
|                          [(u'_id', 1)], [('_cls', 1)]]) | ||||
|  | ||||
|     def test_register_delete_rule_inherited(self): | ||||
|  | ||||
|         class Vaccine(Document): | ||||
|             name = StringField(required=True) | ||||
|  | ||||
|             meta = {"indexes": ["name"]} | ||||
|  | ||||
|         class Animal(Document): | ||||
|             family = StringField(required=True) | ||||
|             vaccine_made = ListField(ReferenceField("Vaccine", reverse_delete_rule=PULL)) | ||||
|  | ||||
|             meta = {"allow_inheritance": True, "indexes": ["family"]} | ||||
|  | ||||
|         class Cat(Animal): | ||||
|             name = StringField(required=True) | ||||
|  | ||||
|         self.assertEqual(Vaccine._meta['delete_rules'][(Animal, 'vaccine_made')], PULL) | ||||
|         self.assertEqual(Vaccine._meta['delete_rules'][(Cat, 'vaccine_made')], PULL) | ||||
|  | ||||
    def test_collection_naming(self):
        """Ensure that a collection with a specified name may be used.

        Covers the default snake_case name, a literal 'collection' meta
        entry, a callable 'collection' meta entry, inheritance from an
        abstract base, and mixin-provided meta.
        """

        # Default: the collection name is the snake_cased class name.
        class DefaultNamingTest(Document):
            pass
        self.assertEqual('default_naming_test',
                         DefaultNamingTest._get_collection_name())

        # An explicit string in meta wins over the default.
        class CustomNamingTest(Document):
            meta = {'collection': 'pimp_my_collection'}

        self.assertEqual('pimp_my_collection',
                         CustomNamingTest._get_collection_name())

        # A callable is invoked with the class to compute the name.
        class DynamicNamingTest(Document):
            meta = {'collection': lambda c: "DYNAMO"}
        self.assertEqual('DYNAMO', DynamicNamingTest._get_collection_name())

        # Use Abstract class to handle backwards compatibility
        class BaseDocument(Document):
            meta = {
                'abstract': True,
                'collection': lambda c: c.__name__.lower()
            }

        class OldNamingConvention(BaseDocument):
            pass
        self.assertEqual('oldnamingconvention',
                         OldNamingConvention._get_collection_name())

        # A concrete subclass may still override the inherited callable.
        class InheritedAbstractNamingTest(BaseDocument):
            meta = {'collection': 'wibble'}
        self.assertEqual('wibble',
                         InheritedAbstractNamingTest._get_collection_name())

        # Mixin tests
        class BaseMixin(object):
            meta = {
                'collection': lambda c: c.__name__.lower()
            }

        class OldMixinNamingConvention(Document, BaseMixin):
            pass
        self.assertEqual('oldmixinnamingconvention',
                          OldMixinNamingConvention._get_collection_name())

        # Redefining BaseMixin is deliberate: a fresh mixin for the
        # inheritance case below, independent of the one above.
        class BaseMixin(object):
            meta = {
                'collection': lambda c: c.__name__.lower()
            }

        class BaseDocument(Document, BaseMixin):
            meta = {'allow_inheritance': True}

        class MyDocument(BaseDocument):
            pass

        # Subclasses share the non-abstract base's collection.
        self.assertEqual('basedocument', MyDocument._get_collection_name())
|  | ||||
|     def test_custom_collection_name_operations(self): | ||||
|         """Ensure that a collection with a specified name is used as expected. | ||||
|         """ | ||||
|         collection_name = 'personCollTest' | ||||
|  | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             meta = {'collection': collection_name} | ||||
|  | ||||
|         Person(name="Test User").save() | ||||
|         self.assertTrue(collection_name in self.db.collection_names()) | ||||
|  | ||||
|         user_obj = self.db[collection_name].find_one() | ||||
|         self.assertEqual(user_obj['name'], "Test User") | ||||
|  | ||||
|         user_obj = Person.objects[0] | ||||
|         self.assertEqual(user_obj.name, "Test User") | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         self.assertFalse(collection_name in self.db.collection_names()) | ||||
|  | ||||
|     def test_collection_name_and_primary(self): | ||||
|         """Ensure that a collection with a specified name may be used. | ||||
|         """ | ||||
|  | ||||
|         class Person(Document): | ||||
|             name = StringField(primary_key=True) | ||||
|             meta = {'collection': 'app'} | ||||
|  | ||||
|         Person(name="Test User").save() | ||||
|  | ||||
|         user_obj = Person.objects.first() | ||||
|         self.assertEqual(user_obj.name, "Test User") | ||||
|  | ||||
|         Person.drop_collection() | ||||
|  | ||||
|  | ||||
# Allow running this test module directly with the stdlib runner.
if __name__ == '__main__':
    unittest.main()
							
								
								
									
										867
									
								
								tests/document/delta.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										867
									
								
								tests/document/delta.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,867 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
|  | ||||
| from bson import SON | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
|  | ||||
| __all__ = ("DeltaTest",) | ||||
|  | ||||
|  | ||||
| class DeltaTest(unittest.TestCase): | ||||
|  | ||||
    def setUp(self):
        """Connect to the test database and register the base Person class
        shared by the tests in this case.
        """
        connect(db='mongoenginetest')
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            # Plain class attribute, not a mongoengine field — presumably
            # here to check the metaclass ignores non-field attributes;
            # confirm against the tests that use self.Person.
            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person
|  | ||||
|     def tearDown(self): | ||||
|         for collection in self.db.collection_names(): | ||||
|             if 'system.' in collection: | ||||
|                 continue | ||||
|             self.db.drop_collection(collection) | ||||
|  | ||||
|     def test_delta(self): | ||||
|         self.delta(Document) | ||||
|         self.delta(DynamicDocument) | ||||
|  | ||||
    def delta(self, DocClass):
        """Check that simple field assignments are tracked and produce the
        expected ``(sets, unsets)`` pair from ``_delta()``.

        ``DocClass`` is ``Document`` or ``DynamicDocument`` so the same
        assertions cover both change-tracking implementations.
        """

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # A freshly loaded document has no pending changes.
        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['string_field'])
        self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))

        # Reset the tracker between assignments so each delta is isolated.
        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['int_field'])
        self.assertEqual(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting: assigning an empty container is reported as an
        # unset ({field: 1} in the second slot), not as a set.
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
|  | ||||
|     def test_delta_recursive(self): | ||||
|         self.delta_recursive(Document, EmbeddedDocument) | ||||
|         self.delta_recursive(DynamicDocument, EmbeddedDocument) | ||||
|         self.delta_recursive(Document, DynamicEmbeddedDocument) | ||||
|         self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument) | ||||
|  | ||||
    def delta_recursive(self, DocClass, EmbeddedClass):
        """Verify change tracking through an embedded document: nested field
        edits, list mutations and deletions must surface from ``_delta()``
        under dotted (``parent.child``) keys.

        NOTE(review): statement order matters throughout — each
        ``save()``/``reload(10)`` pair flushes and clears the change tracker
        before the next mutation is asserted.
        """

        class Embedded(EmbeddedClass):
            id = StringField()
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()
            embedded_field = EmbeddedDocumentField(Embedded)

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # Freshly loaded document: no changes pending.
        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        # Assigning a whole embedded document marks only the parent field.
        embedded_1 = Embedded()
        embedded_1.id = "010101"
        embedded_1.string_field = 'hello'
        embedded_1.int_field = 1
        embedded_1.dict_field = {'hello': 'world'}
        embedded_1.list_field = ['1', 2, {'hello': 'world'}]
        doc.embedded_field = embedded_1

        self.assertEqual(doc._get_changed_fields(), ['embedded_field'])

        embedded_delta = {
            'id': "010101",
            'string_field': 'hello',
            'int_field': 1,
            'dict_field': {'hello': 'world'},
            'list_field': ['1', 2, {'hello': 'world'}]
        }
        self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {}))
        self.assertEqual(doc._delta(),
                         ({'embedded_field': embedded_delta}, {}))

        doc.save()
        doc = doc.reload(10)

        # Clearing a nested dict is reported as an unset of the dotted path.
        doc.embedded_field.dict_field = {}
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.dict_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1}))
        self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.dict_field, {})

        # Same for a nested list.
        doc.embedded_field.list_field = []
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])
        self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1}))
        self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field, [])

        # An embedded document inside a list serialises with its _cls marker.
        embedded_2 = Embedded()
        embedded_2.string_field = 'hello'
        embedded_2.int_field = 1
        embedded_2.dict_field = {'hello': 'world'}
        embedded_2.list_field = ['1', 2, {'hello': 'world'}]

        doc.embedded_field.list_field = ['1', 2, embedded_2]
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field'])

        self.assertEqual(doc.embedded_field._delta(), ({
            'list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))

        self.assertEqual(doc._delta(), ({
            'embedded_field.list_field': ['1', 2, {
                '_cls': 'Embedded',
                'string_field': 'hello',
                'dict_field': {'hello': 'world'},
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
            }]
        }, {}))
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.embedded_field.list_field[0], '1')
        self.assertEqual(doc.embedded_field.list_field[1], 2)
        for k in doc.embedded_field.list_field[2]._fields:
            self.assertEqual(doc.embedded_field.list_field[2][k],
                             embedded_2[k])

        # Editing a field of a list element yields an index-dotted path.
        doc.embedded_field.list_field[2].string_field = 'world'
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field.2.string_field'])
        self.assertEqual(doc.embedded_field._delta(),
                         ({'list_field.2.string_field': 'world'}, {}))
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.string_field': 'world'}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'world')

        # Test multiple assignments: re-assigning the element collapses the
        # change to the element itself, replacing the whole sub-document.
        doc.embedded_field.list_field[2].string_field = 'hello world'
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        self.assertEqual(doc._get_changed_fields(),
                         ['embedded_field.list_field.2'])
        self.assertEqual(doc.embedded_field._delta(), ({'list_field.2': {
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}
            }, {}))
        self.assertEqual(doc._delta(), ({'embedded_field.list_field.2': {
                '_cls': 'Embedded',
                'string_field': 'hello world',
                'int_field': 1,
                'list_field': ['1', 2, {'hello': 'world'}],
                'dict_field': {'hello': 'world'}}
            }, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].string_field,
                         'hello world')

        # Test list native methods: pop/append/sort mark the whole list.
        doc.embedded_field.list_field[2].list_field.pop(0)
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field':
                          [2, {'hello': 'world'}]}, {}))
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        self.assertEqual(doc._delta(),
                         ({'embedded_field.list_field.2.list_field':
                          [2, {'hello': 'world'}, 1]}, {}))
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [2, {'hello': 'world'}, 1])

        # key=str makes the mixed int/dict list orderable.
        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        self.assertEqual(doc.embedded_field.list_field[2].list_field,
                         [1, 2, {'hello': 'world'}])

        # Deleting a dict key nested in a list unsets the full dotted path.
        del doc.embedded_field.list_field[2].list_field[2]['hello']
        self.assertEqual(doc._delta(),
                         ({}, {'embedded_field.list_field.2.list_field.2.hello': 1}))
        doc.save()
        doc = doc.reload(10)

        # Deleting the attribute unsets the field itself.
        del doc.embedded_field.list_field[2].list_field
        self.assertEqual(doc._delta(),
                         ({}, {'embedded_field.list_field.2.list_field': 1}))

        doc.save()
        doc = doc.reload(10)

        # An embedded document stored inside a DictField is also tracked.
        doc.dict_field['Embedded'] = embedded_1
        doc.save()
        doc = doc.reload(10)

        doc.dict_field['Embedded'].string_field = 'Hello World'
        self.assertEqual(doc._get_changed_fields(),
                         ['dict_field.Embedded.string_field'])
        self.assertEqual(doc._delta(),
                         ({'dict_field.Embedded.string_field': 'Hello World'}, {}))
|  | ||||
|     def test_circular_reference_deltas(self): | ||||
|         self.circular_reference_deltas(Document, Document) | ||||
|         self.circular_reference_deltas(Document, DynamicDocument) | ||||
|         self.circular_reference_deltas(DynamicDocument, Document) | ||||
|         self.circular_reference_deltas(DynamicDocument, DynamicDocument) | ||||
|  | ||||
|     def circular_reference_deltas(self, DocClass1, DocClass2): | ||||
|  | ||||
|         class Person(DocClass1): | ||||
|             name = StringField() | ||||
|             owns = ListField(ReferenceField('Organization')) | ||||
|  | ||||
|         class Organization(DocClass2): | ||||
|             name = StringField() | ||||
|             owner = ReferenceField('Person') | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         Organization.drop_collection() | ||||
|  | ||||
|         person = Person(name="owner").save() | ||||
|         organization = Organization(name="company").save() | ||||
|  | ||||
|         person.owns.append(organization) | ||||
|         organization.owner = person | ||||
|  | ||||
|         person.save() | ||||
|         organization.save() | ||||
|  | ||||
|         p = Person.objects[0].select_related() | ||||
|         o = Organization.objects.first() | ||||
|         self.assertEqual(p.owns[0], o) | ||||
|         self.assertEqual(o.owner, p) | ||||
|  | ||||
|     def test_circular_reference_deltas_2(self): | ||||
|         self.circular_reference_deltas_2(Document, Document) | ||||
|         self.circular_reference_deltas_2(Document, DynamicDocument) | ||||
|         self.circular_reference_deltas_2(DynamicDocument, Document) | ||||
|         self.circular_reference_deltas_2(DynamicDocument, DynamicDocument) | ||||
|  | ||||
    def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True):
        """Build a three-way reference cycle (owner <-> organization <->
        employee) and verify every link round-trips through save and query.

        ``dbref`` toggles DBRef vs ObjectId storage on the reference fields.
        Returns the saved ``(person, organization, employee)`` tuple so
        callers can reuse the fixture.
        """

        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField('Organization', dbref=dbref))
            employer = ReferenceField('Organization', dbref=dbref)

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField('Person', dbref=dbref)
            employees = ListField(ReferenceField('Person', dbref=dbref))

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        employee = Person(name="employee").save()
        organization = Organization(name="company").save()

        # Wire up the cycle before re-saving all three documents.
        person.owns.append(organization)
        organization.owner = person

        organization.employees.append(employee)
        employee.employer = organization

        person.save()
        organization.save()
        employee.save()

        p = Person.objects.get(name="owner")
        e = Person.objects.get(name="employee")
        o = Organization.objects.first()

        self.assertEqual(p.owns[0], o)
        self.assertEqual(o.owner, p)
        self.assertEqual(e.employer, o)

        return person, organization, employee
|  | ||||
|     def test_delta_db_field(self): | ||||
|         self.delta_db_field(Document) | ||||
|         self.delta_db_field(DynamicDocument) | ||||
|  | ||||
    def delta_db_field(self, DocClass):
        """Same checks as ``delta`` but with custom ``db_field`` names:
        deltas must be keyed by the database field name, not the Python
        attribute name.
        """

        class Doc(DocClass):
            string_field = StringField(db_field='db_string_field')
            int_field = IntField(db_field='db_int_field')
            dict_field = DictField(db_field='db_dict_field')
            list_field = ListField(db_field='db_list_field')

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # Freshly loaded document: nothing tracked.
        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['db_string_field'])
        self.assertEqual(doc._delta(), ({'db_string_field': 'hello'}, {}))

        # Reset the tracker between assignments so each delta is isolated.
        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['db_int_field'])
        self.assertEqual(doc._delta(), ({'db_int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEqual(doc._delta(), ({'db_dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
        self.assertEqual(doc._delta(), ({'db_list_field': list_value}, {}))

        # Test unsetting: empty containers become unsets under the db name.
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['db_dict_field'])
        self.assertEqual(doc._delta(), ({}, {'db_dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['db_list_field'])
        self.assertEqual(doc._delta(), ({}, {'db_list_field': 1}))

        # Test it saves that data
        doc = Doc()
        doc.save()

        doc.string_field = 'hello'
        doc.int_field = 1
        doc.dict_field = {'hello': 'world'}
        doc.list_field = ['1', 2, {'hello': 'world'}]
        doc.save()
        doc = doc.reload(10)

        self.assertEqual(doc.string_field, 'hello')
        self.assertEqual(doc.int_field, 1)
        self.assertEqual(doc.dict_field, {'hello': 'world'})
        self.assertEqual(doc.list_field, ['1', 2, {'hello': 'world'}])
|  | ||||
|     def test_delta_recursive_db_field(self): | ||||
|         self.delta_recursive_db_field(Document, EmbeddedDocument) | ||||
|         self.delta_recursive_db_field(Document, DynamicEmbeddedDocument) | ||||
|         self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument) | ||||
|         self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument) | ||||
|  | ||||
|     def delta_recursive_db_field(self, DocClass, EmbeddedClass): | ||||
|  | ||||
|         class Embedded(EmbeddedClass): | ||||
|             string_field = StringField(db_field='db_string_field') | ||||
|             int_field = IntField(db_field='db_int_field') | ||||
|             dict_field = DictField(db_field='db_dict_field') | ||||
|             list_field = ListField(db_field='db_list_field') | ||||
|  | ||||
|         class Doc(DocClass): | ||||
|             string_field = StringField(db_field='db_string_field') | ||||
|             int_field = IntField(db_field='db_int_field') | ||||
|             dict_field = DictField(db_field='db_dict_field') | ||||
|             list_field = ListField(db_field='db_list_field') | ||||
|             embedded_field = EmbeddedDocumentField(Embedded, | ||||
|                                     db_field='db_embedded_field') | ||||
|  | ||||
|         Doc.drop_collection() | ||||
|         doc = Doc() | ||||
|         doc.save() | ||||
|  | ||||
|         doc = Doc.objects.first() | ||||
|         self.assertEqual(doc._get_changed_fields(), []) | ||||
|         self.assertEqual(doc._delta(), ({}, {})) | ||||
|  | ||||
|         embedded_1 = Embedded() | ||||
|         embedded_1.string_field = 'hello' | ||||
|         embedded_1.int_field = 1 | ||||
|         embedded_1.dict_field = {'hello': 'world'} | ||||
|         embedded_1.list_field = ['1', 2, {'hello': 'world'}] | ||||
|         doc.embedded_field = embedded_1 | ||||
|  | ||||
|         self.assertEqual(doc._get_changed_fields(), ['db_embedded_field']) | ||||
|  | ||||
|         embedded_delta = { | ||||
|             'db_string_field': 'hello', | ||||
|             'db_int_field': 1, | ||||
|             'db_dict_field': {'hello': 'world'}, | ||||
|             'db_list_field': ['1', 2, {'hello': 'world'}] | ||||
|         } | ||||
|         self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) | ||||
|         self.assertEqual(doc._delta(), | ||||
|             ({'db_embedded_field': embedded_delta}, {})) | ||||
|  | ||||
|         doc.save() | ||||
|         doc = doc.reload(10) | ||||
|  | ||||
|         doc.embedded_field.dict_field = {} | ||||
|         self.assertEqual(doc._get_changed_fields(), | ||||
|             ['db_embedded_field.db_dict_field']) | ||||
|         self.assertEqual(doc.embedded_field._delta(), | ||||
|             ({}, {'db_dict_field': 1})) | ||||
|         self.assertEqual(doc._delta(), | ||||
|             ({}, {'db_embedded_field.db_dict_field': 1})) | ||||
|         doc.save() | ||||
|         doc = doc.reload(10) | ||||
|         self.assertEqual(doc.embedded_field.dict_field, {}) | ||||
|  | ||||
|         doc.embedded_field.list_field = [] | ||||
|         self.assertEqual(doc._get_changed_fields(), | ||||
|             ['db_embedded_field.db_list_field']) | ||||
|         self.assertEqual(doc.embedded_field._delta(), | ||||
|             ({}, {'db_list_field': 1})) | ||||
|         self.assertEqual(doc._delta(), | ||||
|             ({}, {'db_embedded_field.db_list_field': 1})) | ||||
|         doc.save() | ||||
|         doc = doc.reload(10) | ||||
|         self.assertEqual(doc.embedded_field.list_field, []) | ||||
|  | ||||
|         embedded_2 = Embedded() | ||||
|         embedded_2.string_field = 'hello' | ||||
|         embedded_2.int_field = 1 | ||||
|         embedded_2.dict_field = {'hello': 'world'} | ||||
|         embedded_2.list_field = ['1', 2, {'hello': 'world'}] | ||||
|  | ||||
|         doc.embedded_field.list_field = ['1', 2, embedded_2] | ||||
|         self.assertEqual(doc._get_changed_fields(), | ||||
|             ['db_embedded_field.db_list_field']) | ||||
|         self.assertEqual(doc.embedded_field._delta(), ({ | ||||
|             'db_list_field': ['1', 2, { | ||||
|                 '_cls': 'Embedded', | ||||
|                 'db_string_field': 'hello', | ||||
|                 'db_dict_field': {'hello': 'world'}, | ||||
|                 'db_int_field': 1, | ||||
|                 'db_list_field': ['1', 2, {'hello': 'world'}], | ||||
|             }] | ||||
|         }, {})) | ||||
|  | ||||
|         self.assertEqual(doc._delta(), ({ | ||||
|             'db_embedded_field.db_list_field': ['1', 2, { | ||||
|                 '_cls': 'Embedded', | ||||
|                 'db_string_field': 'hello', | ||||
|                 'db_dict_field': {'hello': 'world'}, | ||||
|                 'db_int_field': 1, | ||||
|                 'db_list_field': ['1', 2, {'hello': 'world'}], | ||||
|             }] | ||||
|         }, {})) | ||||
|         doc.save() | ||||
|         doc = doc.reload(10) | ||||
|  | ||||
|         self.assertEqual(doc.embedded_field.list_field[0], '1') | ||||
|         self.assertEqual(doc.embedded_field.list_field[1], 2) | ||||
|         for k in doc.embedded_field.list_field[2]._fields: | ||||
|             self.assertEqual(doc.embedded_field.list_field[2][k], | ||||
|                              embedded_2[k]) | ||||
|  | ||||
|         doc.embedded_field.list_field[2].string_field = 'world' | ||||
|         self.assertEqual(doc._get_changed_fields(), | ||||
|             ['db_embedded_field.db_list_field.2.db_string_field']) | ||||
|         self.assertEqual(doc.embedded_field._delta(), | ||||
|             ({'db_list_field.2.db_string_field': 'world'}, {})) | ||||
|         self.assertEqual(doc._delta(), | ||||
|             ({'db_embedded_field.db_list_field.2.db_string_field': 'world'}, | ||||
|              {})) | ||||
|         doc.save() | ||||
|         doc = doc.reload(10) | ||||
|         self.assertEqual(doc.embedded_field.list_field[2].string_field, | ||||
|                         'world') | ||||
|  | ||||
|         # Test multiple assignments | ||||
|         doc.embedded_field.list_field[2].string_field = 'hello world' | ||||
|         doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] | ||||
|         self.assertEqual(doc._get_changed_fields(), | ||||
|             ['db_embedded_field.db_list_field.2']) | ||||
|         self.assertEqual(doc.embedded_field._delta(), ({'db_list_field.2': { | ||||
|             '_cls': 'Embedded', | ||||
|             'db_string_field': 'hello world', | ||||
|             'db_int_field': 1, | ||||
|             'db_list_field': ['1', 2, {'hello': 'world'}], | ||||
|             'db_dict_field': {'hello': 'world'}}}, {})) | ||||
|         self.assertEqual(doc._delta(), ({ | ||||
|             'db_embedded_field.db_list_field.2': { | ||||
|                 '_cls': 'Embedded', | ||||
|                 'db_string_field': 'hello world', | ||||
|                 'db_int_field': 1, | ||||
|                 'db_list_field': ['1', 2, {'hello': 'world'}], | ||||
|                 'db_dict_field': {'hello': 'world'}} | ||||
|             }, {})) | ||||
|         doc.save() | ||||
|         doc = doc.reload(10) | ||||
|         self.assertEqual(doc.embedded_field.list_field[2].string_field, | ||||
|                         'hello world') | ||||
|  | ||||
|         # Test list native methods | ||||
|         doc.embedded_field.list_field[2].list_field.pop(0) | ||||
|         self.assertEqual(doc._delta(), | ||||
|             ({'db_embedded_field.db_list_field.2.db_list_field': | ||||
|                 [2, {'hello': 'world'}]}, {})) | ||||
|         doc.save() | ||||
|         doc = doc.reload(10) | ||||
|  | ||||
|         doc.embedded_field.list_field[2].list_field.append(1) | ||||
|         self.assertEqual(doc._delta(), | ||||
|             ({'db_embedded_field.db_list_field.2.db_list_field': | ||||
|                 [2, {'hello': 'world'}, 1]}, {})) | ||||
|         doc.save() | ||||
|         doc = doc.reload(10) | ||||
|         self.assertEqual(doc.embedded_field.list_field[2].list_field, | ||||
|             [2, {'hello': 'world'}, 1]) | ||||
|  | ||||
|         doc.embedded_field.list_field[2].list_field.sort(key=str) | ||||
|         doc.save() | ||||
|         doc = doc.reload(10) | ||||
|         self.assertEqual(doc.embedded_field.list_field[2].list_field, | ||||
|             [1, 2, {'hello': 'world'}]) | ||||
|  | ||||
|         del doc.embedded_field.list_field[2].list_field[2]['hello'] | ||||
|         self.assertEqual(doc._delta(), | ||||
|             ({}, {'db_embedded_field.db_list_field.2.db_list_field.2.hello': 1})) | ||||
|         doc.save() | ||||
|         doc = doc.reload(10) | ||||
|  | ||||
|         del doc.embedded_field.list_field[2].list_field | ||||
|         self.assertEqual(doc._delta(), ({}, | ||||
|             {'db_embedded_field.db_list_field.2.db_list_field': 1})) | ||||
|  | ||||
    def test_delta_for_dynamic_documents(self):
        """Delta computation for DynamicDocument, including dynamic
        (undeclared) fields such as ``age``.

        Checks three situations: an unsaved document (full SON delta),
        a set-then-deleted dynamic attribute (no residue in the delta),
        and a changed field on both a freshly saved and a re-queried
        document.
        """
        class Person(DynamicDocument):
            name = StringField()
            meta = {'allow_inheritance': True}

        Person.drop_collection()

        # Unsaved document: the whole document (including _cls) is the
        # "set" half of the delta; nothing is unset.
        p = Person(name="James", age=34)
        self.assertEqual(p._delta(), (
            SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

        # Setting and immediately deleting a dynamic attribute must leave
        # the delta exactly as it was before.
        p.doc = 123
        del p.doc
        self.assertEqual(p._delta(), (
            SON([('_cls', 'Person'), ('name', 'James'), ('age', 34)]), {}))

        # After a save, only subsequently changed fields appear in the delta.
        p = Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        # Same behaviour on a document fetched back from the database.
        p = Person.objects(age=22).get()
        p.age = 24
        self.assertEqual(p.age, 24)
        self.assertEqual(p._get_changed_fields(), ['age'])
        self.assertEqual(p._delta(), ({'age': 24}, {}))

        p.save()
        self.assertEqual(1, Person.objects(age=24).count())
|  | ||||
    def test_dynamic_delta(self):
        """Delta computation for a fully dynamic document: scalar, dict
        and list values produce "set" deltas, while assigning an empty
        container produces an "unset" delta.

        ``_changed_fields`` is reset between steps so each assertion
        sees only the single change made in that step.
        """

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # A freshly loaded document has no pending changes.
        doc = Doc.objects.first()
        self.assertEqual(doc._get_changed_fields(), [])
        self.assertEqual(doc._delta(), ({}, {}))

        doc.string_field = 'hello'
        self.assertEqual(doc._get_changed_fields(), ['string_field'])
        self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {}))

        doc._changed_fields = []
        doc.int_field = 1
        self.assertEqual(doc._get_changed_fields(), ['int_field'])
        self.assertEqual(doc._delta(), ({'int_field': 1}, {}))

        doc._changed_fields = []
        dict_value = {'hello': 'world', 'ping': 'pong'}
        doc.dict_field = dict_value
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {}))

        doc._changed_fields = []
        list_value = ['1', 2, {'hello': 'world'}]
        doc.list_field = list_value
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({'list_field': list_value}, {}))

        # Test unsetting: assigning an empty container is reported as an
        # unset ({field: 1} in the removal half), not as a set.
        doc._changed_fields = []
        doc.dict_field = {}
        self.assertEqual(doc._get_changed_fields(), ['dict_field'])
        self.assertEqual(doc._delta(), ({}, {'dict_field': 1}))

        doc._changed_fields = []
        doc.list_field = []
        self.assertEqual(doc._get_changed_fields(), ['list_field'])
        self.assertEqual(doc._delta(), ({}, {'list_field': 1}))
|  | ||||
|     def test_delta_with_dbref_true(self): | ||||
|         person, organization, employee = self.circular_reference_deltas_2(Document, Document, True) | ||||
|         employee.name = 'test' | ||||
|  | ||||
|         self.assertEqual(organization._get_changed_fields(), []) | ||||
|  | ||||
|         updates, removals = organization._delta() | ||||
|         self.assertEqual({}, removals) | ||||
|         self.assertEqual({}, updates) | ||||
|  | ||||
|         organization.employees.append(person) | ||||
|         updates, removals = organization._delta() | ||||
|         self.assertEqual({}, removals) | ||||
|         self.assertTrue('employees' in updates) | ||||
|  | ||||
|     def test_delta_with_dbref_false(self): | ||||
|         person, organization, employee = self.circular_reference_deltas_2(Document, Document, False) | ||||
|         employee.name = 'test' | ||||
|  | ||||
|         self.assertEqual(organization._get_changed_fields(), []) | ||||
|  | ||||
|         updates, removals = organization._delta() | ||||
|         self.assertEqual({}, removals) | ||||
|         self.assertEqual({}, updates) | ||||
|  | ||||
|         organization.employees.append(person) | ||||
|         updates, removals = organization._delta() | ||||
|         self.assertEqual({}, removals) | ||||
|         self.assertTrue('employees' in updates) | ||||
|  | ||||
|     def test_nested_nested_fields_mark_as_changed(self): | ||||
|         class EmbeddedDoc(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc))) | ||||
|             name = StringField() | ||||
|  | ||||
|         MyDoc.drop_collection() | ||||
|  | ||||
|         mydoc = MyDoc(name='testcase1', subs={'a': {'b': EmbeddedDoc(name='foo')}}).save() | ||||
|  | ||||
|         mydoc = MyDoc.objects.first() | ||||
|         subdoc = mydoc.subs['a']['b'] | ||||
|         subdoc.name = 'bar' | ||||
|  | ||||
|         self.assertEqual(["name"], subdoc._get_changed_fields()) | ||||
|         self.assertEqual(["subs.a.b.name"], mydoc._get_changed_fields()) | ||||
|  | ||||
|         mydoc._clear_changed_fields() | ||||
|         self.assertEqual([], mydoc._get_changed_fields()) | ||||
|  | ||||
|     def test_lower_level_mark_as_changed(self): | ||||
|         class EmbeddedDoc(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) | ||||
|  | ||||
|         MyDoc.drop_collection() | ||||
|  | ||||
|         MyDoc().save() | ||||
|  | ||||
|         mydoc = MyDoc.objects.first() | ||||
|         mydoc.subs['a'] = EmbeddedDoc() | ||||
|         self.assertEqual(["subs.a"], mydoc._get_changed_fields()) | ||||
|  | ||||
|         subdoc = mydoc.subs['a'] | ||||
|         subdoc.name = 'bar' | ||||
|  | ||||
|         self.assertEqual(["name"], subdoc._get_changed_fields()) | ||||
|         self.assertEqual(["subs.a"], mydoc._get_changed_fields()) | ||||
|         mydoc.save() | ||||
|  | ||||
|         mydoc._clear_changed_fields() | ||||
|         self.assertEqual([], mydoc._get_changed_fields()) | ||||
|  | ||||
|     def test_upper_level_mark_as_changed(self): | ||||
|         class EmbeddedDoc(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) | ||||
|  | ||||
|         MyDoc.drop_collection() | ||||
|  | ||||
|         MyDoc(subs={'a': EmbeddedDoc(name='foo')}).save() | ||||
|  | ||||
|         mydoc = MyDoc.objects.first() | ||||
|         subdoc = mydoc.subs['a'] | ||||
|         subdoc.name = 'bar' | ||||
|  | ||||
|         self.assertEqual(["name"], subdoc._get_changed_fields()) | ||||
|         self.assertEqual(["subs.a.name"], mydoc._get_changed_fields()) | ||||
|  | ||||
|         mydoc.subs['a'] = EmbeddedDoc() | ||||
|         self.assertEqual(["subs.a"], mydoc._get_changed_fields()) | ||||
|         mydoc.save() | ||||
|  | ||||
|         mydoc._clear_changed_fields() | ||||
|         self.assertEqual([], mydoc._get_changed_fields()) | ||||
|  | ||||
|     def test_referenced_object_changed_attributes(self): | ||||
|         """Ensures that when you save a new reference to a field, the referenced object isn't altered""" | ||||
|  | ||||
|         class Organization(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         class User(Document): | ||||
|             name = StringField() | ||||
|             org = ReferenceField('Organization', required=True) | ||||
|  | ||||
|         Organization.drop_collection() | ||||
|         User.drop_collection() | ||||
|  | ||||
|         org1 = Organization(name='Org 1') | ||||
|         org1.save() | ||||
|  | ||||
|         org2 = Organization(name='Org 2') | ||||
|         org2.save() | ||||
|  | ||||
|         user = User(name='Fred', org=org1) | ||||
|         user.save() | ||||
|  | ||||
|         org1.reload() | ||||
|         org2.reload() | ||||
|         user.reload() | ||||
|         self.assertEqual(org1.name, 'Org 1') | ||||
|         self.assertEqual(org2.name, 'Org 2') | ||||
|         self.assertEqual(user.name, 'Fred') | ||||
|  | ||||
|         user.name = 'Harold' | ||||
|         user.org = org2 | ||||
|  | ||||
|         org2.name = 'New Org 2' | ||||
|         self.assertEqual(org2.name, 'New Org 2') | ||||
|  | ||||
|         user.save() | ||||
|         org2.save() | ||||
|  | ||||
|         self.assertEqual(org2.name, 'New Org 2') | ||||
|         org2.reload() | ||||
|         self.assertEqual(org2.name, 'New Org 2') | ||||
|  | ||||
|     def test_delta_for_nested_map_fields(self): | ||||
|         class UInfoDocument(Document): | ||||
|             phone = StringField() | ||||
|  | ||||
|         class EmbeddedRole(EmbeddedDocument): | ||||
|             type = StringField() | ||||
|  | ||||
|         class EmbeddedUser(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|             roles = MapField(field=EmbeddedDocumentField(EmbeddedRole)) | ||||
|             rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole)) | ||||
|             info = ReferenceField(UInfoDocument) | ||||
|  | ||||
|         class Doc(Document): | ||||
|             users = MapField(field=EmbeddedDocumentField(EmbeddedUser)) | ||||
|             num = IntField(default=-1) | ||||
|  | ||||
|         Doc.drop_collection() | ||||
|  | ||||
|         doc = Doc(num=1) | ||||
|         doc.users["007"] = EmbeddedUser(name="Agent007") | ||||
|         doc.save() | ||||
|  | ||||
|         uinfo = UInfoDocument(phone="79089269066") | ||||
|         uinfo.save() | ||||
|  | ||||
|         d = Doc.objects(num=1).first() | ||||
|         d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin") | ||||
|         d.users["007"]["rolist"].append(EmbeddedRole(type="oops")) | ||||
|         d.users["007"]["info"] = uinfo | ||||
|         delta = d._delta() | ||||
|         self.assertEqual(True, "users.007.roles.666" in delta[0]) | ||||
|         self.assertEqual(True, "users.007.rolist" in delta[0]) | ||||
|         self.assertEqual(True, "users.007.info" in delta[0]) | ||||
|         self.assertEqual('superadmin', delta[0]["users.007.roles.666"]["type"]) | ||||
|         self.assertEqual('oops', delta[0]["users.007.rolist"][0]["type"]) | ||||
|         self.assertEqual(uinfo.id, delta[0]["users.007.info"]) | ||||
|  | ||||
# Allow running this test module directly (e.g. ``python test_delta.py``).
if __name__ == '__main__':
    unittest.main()
| @@ -1,20 +1,20 @@ | ||||
| import unittest | ||||
| 
 | ||||
| import pytest | ||||
| 
 | ||||
| from mongoengine import * | ||||
| from tests.utils import MongoDBTestCase | ||||
| from mongoengine.connection import get_db | ||||
| 
 | ||||
| __all__ = ("TestDynamicDocument",) | ||||
| __all__ = ("DynamicTest", ) | ||||
| 
 | ||||
| 
 | ||||
| class TestDynamicDocument(MongoDBTestCase): | ||||
| class DynamicTest(unittest.TestCase): | ||||
| 
 | ||||
|     def setUp(self): | ||||
|         super(TestDynamicDocument, self).setUp() | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
| 
 | ||||
|         class Person(DynamicDocument): | ||||
|             name = StringField() | ||||
|             meta = {"allow_inheritance": True} | ||||
|             meta = {'allow_inheritance': True} | ||||
| 
 | ||||
|         Person.drop_collection() | ||||
| 
 | ||||
| @@ -27,15 +27,16 @@ class TestDynamicDocument(MongoDBTestCase): | ||||
|         p.name = "James" | ||||
|         p.age = 34 | ||||
| 
 | ||||
|         assert p.to_mongo() == {"_cls": "Person", "name": "James", "age": 34} | ||||
|         assert p.to_mongo().keys() == ["_cls", "name", "age"] | ||||
|         self.assertEqual(p.to_mongo(), {"_cls": "Person", "name": "James", | ||||
|                                         "age": 34}) | ||||
|         self.assertEqual(p.to_mongo().keys(), ["_cls", "name", "age"]) | ||||
|         p.save() | ||||
|         assert p.to_mongo().keys() == ["_id", "_cls", "name", "age"] | ||||
|         self.assertEqual(p.to_mongo().keys(), ["_id", "_cls", "name", "age"]) | ||||
| 
 | ||||
|         assert self.Person.objects.first().age == 34 | ||||
|         self.assertEqual(self.Person.objects.first().age, 34) | ||||
| 
 | ||||
|         # Confirm no changes to self.Person | ||||
|         assert not hasattr(self.Person, "age") | ||||
|         self.assertFalse(hasattr(self.Person, 'age')) | ||||
| 
 | ||||
|     def test_change_scope_of_variable(self): | ||||
|         """Test changing the scope of a dynamic field has no adverse effects""" | ||||
| @@ -45,11 +46,11 @@ class TestDynamicDocument(MongoDBTestCase): | ||||
|         p.save() | ||||
| 
 | ||||
|         p = self.Person.objects.get() | ||||
|         p.misc = {"hello": "world"} | ||||
|         p.misc = {'hello': 'world'} | ||||
|         p.save() | ||||
| 
 | ||||
|         p = self.Person.objects.get() | ||||
|         assert p.misc == {"hello": "world"} | ||||
|         self.assertEqual(p.misc, {'hello': 'world'}) | ||||
| 
 | ||||
|     def test_delete_dynamic_field(self): | ||||
|         """Test deleting a dynamic field works""" | ||||
| @@ -60,23 +61,23 @@ class TestDynamicDocument(MongoDBTestCase): | ||||
|         p.save() | ||||
| 
 | ||||
|         p = self.Person.objects.get() | ||||
|         p.misc = {"hello": "world"} | ||||
|         p.misc = {'hello': 'world'} | ||||
|         p.save() | ||||
| 
 | ||||
|         p = self.Person.objects.get() | ||||
|         assert p.misc == {"hello": "world"} | ||||
|         self.assertEqual(p.misc, {'hello': 'world'}) | ||||
|         collection = self.db[self.Person._get_collection_name()] | ||||
|         obj = collection.find_one() | ||||
|         assert sorted(obj.keys()) == ["_cls", "_id", "misc", "name"] | ||||
|         self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'misc', 'name']) | ||||
| 
 | ||||
|         del p.misc | ||||
|         p.save() | ||||
| 
 | ||||
|         p = self.Person.objects.get() | ||||
|         assert not hasattr(p, "misc") | ||||
|         self.assertFalse(hasattr(p, 'misc')) | ||||
| 
 | ||||
|         obj = collection.find_one() | ||||
|         assert sorted(obj.keys()) == ["_cls", "_id", "name"] | ||||
|         self.assertEqual(sorted(obj.keys()), ['_cls', '_id', 'name']) | ||||
| 
 | ||||
|     def test_reload_after_unsetting(self): | ||||
|         p = self.Person() | ||||
| @@ -90,55 +91,12 @@ class TestDynamicDocument(MongoDBTestCase): | ||||
|         p = self.Person.objects.create() | ||||
|         p.update(age=1) | ||||
| 
 | ||||
|         assert len(p._data) == 3 | ||||
|         assert sorted(p._data.keys()) == ["_cls", "id", "name"] | ||||
|         self.assertEqual(len(p._data), 3) | ||||
|         self.assertEqual(sorted(p._data.keys()), ['_cls', 'id', 'name']) | ||||
| 
 | ||||
|         p.reload() | ||||
|         assert len(p._data) == 4 | ||||
|         assert sorted(p._data.keys()) == ["_cls", "age", "id", "name"] | ||||
| 
 | ||||
|     def test_fields_without_underscore(self): | ||||
|         """Ensure we can query dynamic fields""" | ||||
|         Person = self.Person | ||||
| 
 | ||||
|         p = self.Person(name="Dean") | ||||
|         p.save() | ||||
| 
 | ||||
|         raw_p = Person.objects.as_pymongo().get(id=p.id) | ||||
|         assert raw_p == {"_cls": u"Person", "_id": p.id, "name": u"Dean"} | ||||
| 
 | ||||
|         p.name = "OldDean" | ||||
|         p.newattr = "garbage" | ||||
|         p.save() | ||||
|         raw_p = Person.objects.as_pymongo().get(id=p.id) | ||||
|         assert raw_p == { | ||||
|             "_cls": u"Person", | ||||
|             "_id": p.id, | ||||
|             "name": "OldDean", | ||||
|             "newattr": u"garbage", | ||||
|         } | ||||
| 
 | ||||
|     def test_fields_containing_underscore(self): | ||||
|         """Ensure we can query dynamic fields""" | ||||
| 
 | ||||
|         class WeirdPerson(DynamicDocument): | ||||
|             name = StringField() | ||||
|             _name = StringField() | ||||
| 
 | ||||
|         WeirdPerson.drop_collection() | ||||
| 
 | ||||
|         p = WeirdPerson(name="Dean", _name="Dean") | ||||
|         p.save() | ||||
| 
 | ||||
|         raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) | ||||
|         assert raw_p == {"_id": p.id, "_name": u"Dean", "name": u"Dean"} | ||||
| 
 | ||||
|         p.name = "OldDean" | ||||
|         p._name = "NewDean" | ||||
|         p._newattr1 = "garbage"  # Unknown fields won't be added | ||||
|         p.save() | ||||
|         raw_p = WeirdPerson.objects.as_pymongo().get(id=p.id) | ||||
|         assert raw_p == {"_id": p.id, "_name": u"NewDean", "name": u"OldDean"} | ||||
|         self.assertEqual(len(p._data), 4) | ||||
|         self.assertEqual(sorted(p._data.keys()), ['_cls', 'age', 'id', 'name']) | ||||
| 
 | ||||
|     def test_dynamic_document_queries(self): | ||||
|         """Ensure we can query dynamic fields""" | ||||
| @@ -147,10 +105,10 @@ class TestDynamicDocument(MongoDBTestCase): | ||||
|         p.age = 22 | ||||
|         p.save() | ||||
| 
 | ||||
|         assert 1 == self.Person.objects(age=22).count() | ||||
|         self.assertEqual(1, self.Person.objects(age=22).count()) | ||||
|         p = self.Person.objects(age=22) | ||||
|         p = p.get() | ||||
|         assert 22 == p.age | ||||
|         self.assertEqual(22, p.age) | ||||
| 
 | ||||
|     def test_complex_dynamic_document_queries(self): | ||||
|         class Person(DynamicDocument): | ||||
| @@ -170,25 +128,26 @@ class TestDynamicDocument(MongoDBTestCase): | ||||
|         p2.age = 10 | ||||
|         p2.save() | ||||
| 
 | ||||
|         assert Person.objects(age__icontains="ten").count() == 2 | ||||
|         assert Person.objects(age__gte=10).count() == 1 | ||||
|         self.assertEqual(Person.objects(age__icontains='ten').count(), 2) | ||||
|         self.assertEqual(Person.objects(age__gte=10).count(), 1) | ||||
| 
 | ||||
|     def test_complex_data_lookups(self): | ||||
|         """Ensure you can query dynamic document dynamic fields""" | ||||
|         p = self.Person() | ||||
|         p.misc = {"hello": "world"} | ||||
|         p.misc = {'hello': 'world'} | ||||
|         p.save() | ||||
| 
 | ||||
|         assert 1 == self.Person.objects(misc__hello="world").count() | ||||
|         self.assertEqual(1, self.Person.objects(misc__hello='world').count()) | ||||
| 
 | ||||
|     def test_three_level_complex_data_lookups(self): | ||||
|         """Ensure you can query three level document dynamic fields""" | ||||
|         self.Person.objects.create(misc={"hello": {"hello2": "world"}}) | ||||
|         assert 1 == self.Person.objects(misc__hello__hello2="world").count() | ||||
|         p = self.Person.objects.create( | ||||
|             misc={'hello': {'hello2': 'world'}} | ||||
|         ) | ||||
|         self.assertEqual(1, self.Person.objects(misc__hello__hello2='world').count()) | ||||
| 
 | ||||
|     def test_complex_embedded_document_validation(self): | ||||
|         """Ensure embedded dynamic documents may be validated""" | ||||
| 
 | ||||
|         class Embedded(DynamicEmbeddedDocument): | ||||
|             content = URLField() | ||||
| 
 | ||||
| @@ -198,29 +157,27 @@ class TestDynamicDocument(MongoDBTestCase): | ||||
|         Doc.drop_collection() | ||||
|         doc = Doc() | ||||
| 
 | ||||
|         embedded_doc_1 = Embedded(content="http://mongoengine.org") | ||||
|         embedded_doc_1 = Embedded(content='http://mongoengine.org') | ||||
|         embedded_doc_1.validate() | ||||
| 
 | ||||
|         embedded_doc_2 = Embedded(content="this is not a url") | ||||
|         with pytest.raises(ValidationError): | ||||
|             embedded_doc_2.validate() | ||||
|         embedded_doc_2 = Embedded(content='this is not a url') | ||||
|         self.assertRaises(ValidationError, embedded_doc_2.validate) | ||||
| 
 | ||||
|         doc.embedded_field_1 = embedded_doc_1 | ||||
|         doc.embedded_field_2 = embedded_doc_2 | ||||
|         with pytest.raises(ValidationError): | ||||
|             doc.validate() | ||||
|         self.assertRaises(ValidationError, doc.validate) | ||||
| 
 | ||||
|     def test_inheritance(self): | ||||
|         """Ensure that dynamic document plays nice with inheritance""" | ||||
| 
 | ||||
|         class Employee(self.Person): | ||||
|             salary = IntField() | ||||
| 
 | ||||
|         Employee.drop_collection() | ||||
| 
 | ||||
|         assert "name" in Employee._fields | ||||
|         assert "salary" in Employee._fields | ||||
|         assert Employee._get_collection_name() == self.Person._get_collection_name() | ||||
|         self.assertTrue('name' in Employee._fields) | ||||
|         self.assertTrue('salary' in Employee._fields) | ||||
|         self.assertEqual(Employee._get_collection_name(), | ||||
|                          self.Person._get_collection_name()) | ||||
| 
 | ||||
|         joe_bloggs = Employee() | ||||
|         joe_bloggs.name = "Joe Bloggs" | ||||
| @@ -228,15 +185,14 @@ class TestDynamicDocument(MongoDBTestCase): | ||||
|         joe_bloggs.age = 20 | ||||
|         joe_bloggs.save() | ||||
| 
 | ||||
|         assert 1 == self.Person.objects(age=20).count() | ||||
|         assert 1 == Employee.objects(age=20).count() | ||||
|         self.assertEqual(1, self.Person.objects(age=20).count()) | ||||
|         self.assertEqual(1, Employee.objects(age=20).count()) | ||||
| 
 | ||||
|         joe_bloggs = self.Person.objects.first() | ||||
|         assert isinstance(joe_bloggs, Employee) | ||||
|         self.assertTrue(isinstance(joe_bloggs, Employee)) | ||||
| 
 | ||||
|     def test_embedded_dynamic_document(self): | ||||
|         """Test dynamic embedded documents""" | ||||
| 
 | ||||
|         class Embedded(DynamicEmbeddedDocument): | ||||
|             pass | ||||
| 
 | ||||
| @@ -247,33 +203,33 @@ class TestDynamicDocument(MongoDBTestCase): | ||||
|         doc = Doc() | ||||
| 
 | ||||
|         embedded_1 = Embedded() | ||||
|         embedded_1.string_field = "hello" | ||||
|         embedded_1.string_field = 'hello' | ||||
|         embedded_1.int_field = 1 | ||||
|         embedded_1.dict_field = {"hello": "world"} | ||||
|         embedded_1.list_field = ["1", 2, {"hello": "world"}] | ||||
|         embedded_1.dict_field = {'hello': 'world'} | ||||
|         embedded_1.list_field = ['1', 2, {'hello': 'world'}] | ||||
|         doc.embedded_field = embedded_1 | ||||
| 
 | ||||
|         assert doc.to_mongo() == { | ||||
|         self.assertEqual(doc.to_mongo(), { | ||||
|             "embedded_field": { | ||||
|                 "_cls": "Embedded", | ||||
|                 "string_field": "hello", | ||||
|                 "int_field": 1, | ||||
|                 "dict_field": {"hello": "world"}, | ||||
|                 "list_field": ["1", 2, {"hello": "world"}], | ||||
|                 "list_field": ['1', 2, {'hello': 'world'}] | ||||
|             } | ||||
|         } | ||||
|         }) | ||||
|         doc.save() | ||||
| 
 | ||||
|         doc = Doc.objects.first() | ||||
|         assert doc.embedded_field.__class__ == Embedded | ||||
|         assert doc.embedded_field.string_field == "hello" | ||||
|         assert doc.embedded_field.int_field == 1 | ||||
|         assert doc.embedded_field.dict_field == {"hello": "world"} | ||||
|         assert doc.embedded_field.list_field == ["1", 2, {"hello": "world"}] | ||||
|         self.assertEqual(doc.embedded_field.__class__, Embedded) | ||||
|         self.assertEqual(doc.embedded_field.string_field, "hello") | ||||
|         self.assertEqual(doc.embedded_field.int_field, 1) | ||||
|         self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) | ||||
|         self.assertEqual(doc.embedded_field.list_field, | ||||
|                             ['1', 2, {'hello': 'world'}]) | ||||
| 
 | ||||
|     def test_complex_embedded_documents(self): | ||||
|         """Test complex dynamic embedded documents setups""" | ||||
| 
 | ||||
|         class Embedded(DynamicEmbeddedDocument): | ||||
|             pass | ||||
| 
 | ||||
| @@ -284,54 +240,51 @@ class TestDynamicDocument(MongoDBTestCase): | ||||
|         doc = Doc() | ||||
| 
 | ||||
|         embedded_1 = Embedded() | ||||
|         embedded_1.string_field = "hello" | ||||
|         embedded_1.string_field = 'hello' | ||||
|         embedded_1.int_field = 1 | ||||
|         embedded_1.dict_field = {"hello": "world"} | ||||
|         embedded_1.dict_field = {'hello': 'world'} | ||||
| 
 | ||||
|         embedded_2 = Embedded() | ||||
|         embedded_2.string_field = "hello" | ||||
|         embedded_2.string_field = 'hello' | ||||
|         embedded_2.int_field = 1 | ||||
|         embedded_2.dict_field = {"hello": "world"} | ||||
|         embedded_2.list_field = ["1", 2, {"hello": "world"}] | ||||
|         embedded_2.dict_field = {'hello': 'world'} | ||||
|         embedded_2.list_field = ['1', 2, {'hello': 'world'}] | ||||
| 
 | ||||
|         embedded_1.list_field = ["1", 2, embedded_2] | ||||
|         embedded_1.list_field = ['1', 2, embedded_2] | ||||
|         doc.embedded_field = embedded_1 | ||||
| 
 | ||||
|         assert doc.to_mongo() == { | ||||
|         self.assertEqual(doc.to_mongo(), { | ||||
|             "embedded_field": { | ||||
|                 "_cls": "Embedded", | ||||
|                 "string_field": "hello", | ||||
|                 "int_field": 1, | ||||
|                 "dict_field": {"hello": "world"}, | ||||
|                 "list_field": [ | ||||
|                     "1", | ||||
|                     2, | ||||
|                     { | ||||
|                         "_cls": "Embedded", | ||||
|                         "string_field": "hello", | ||||
|                         "int_field": 1, | ||||
|                         "dict_field": {"hello": "world"}, | ||||
|                         "list_field": ["1", 2, {"hello": "world"}], | ||||
|                     }, | ||||
|                 ], | ||||
|                 "list_field": ['1', 2, | ||||
|                     {"_cls": "Embedded", | ||||
|                     "string_field": "hello", | ||||
|                     "int_field": 1, | ||||
|                     "dict_field": {"hello": "world"}, | ||||
|                     "list_field": ['1', 2, {'hello': 'world'}]} | ||||
|                 ] | ||||
|             } | ||||
|         } | ||||
|         }) | ||||
|         doc.save() | ||||
|         doc = Doc.objects.first() | ||||
|         assert doc.embedded_field.__class__ == Embedded | ||||
|         assert doc.embedded_field.string_field == "hello" | ||||
|         assert doc.embedded_field.int_field == 1 | ||||
|         assert doc.embedded_field.dict_field == {"hello": "world"} | ||||
|         assert doc.embedded_field.list_field[0] == "1" | ||||
|         assert doc.embedded_field.list_field[1] == 2 | ||||
|         self.assertEqual(doc.embedded_field.__class__, Embedded) | ||||
|         self.assertEqual(doc.embedded_field.string_field, "hello") | ||||
|         self.assertEqual(doc.embedded_field.int_field, 1) | ||||
|         self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) | ||||
|         self.assertEqual(doc.embedded_field.list_field[0], '1') | ||||
|         self.assertEqual(doc.embedded_field.list_field[1], 2) | ||||
| 
 | ||||
|         embedded_field = doc.embedded_field.list_field[2] | ||||
| 
 | ||||
|         assert embedded_field.__class__ == Embedded | ||||
|         assert embedded_field.string_field == "hello" | ||||
|         assert embedded_field.int_field == 1 | ||||
|         assert embedded_field.dict_field == {"hello": "world"} | ||||
|         assert embedded_field.list_field == ["1", 2, {"hello": "world"}] | ||||
|         self.assertEqual(embedded_field.__class__, Embedded) | ||||
|         self.assertEqual(embedded_field.string_field, "hello") | ||||
|         self.assertEqual(embedded_field.int_field, 1) | ||||
|         self.assertEqual(embedded_field.dict_field, {'hello': 'world'}) | ||||
|         self.assertEqual(embedded_field.list_field, ['1', 2, | ||||
|                                                         {'hello': 'world'}]) | ||||
| 
 | ||||
|     def test_dynamic_and_embedded(self): | ||||
|         """Ensure embedded documents play nicely""" | ||||
| @@ -350,18 +303,18 @@ class TestDynamicDocument(MongoDBTestCase): | ||||
|         person.address.city = "Lundenne" | ||||
|         person.save() | ||||
| 
 | ||||
|         assert Person.objects.first().address.city == "Lundenne" | ||||
|         self.assertEqual(Person.objects.first().address.city, "Lundenne") | ||||
| 
 | ||||
|         person = Person.objects.first() | ||||
|         person.address = Address(city="Londinium") | ||||
|         person.save() | ||||
| 
 | ||||
|         assert Person.objects.first().address.city == "Londinium" | ||||
|         self.assertEqual(Person.objects.first().address.city, "Londinium") | ||||
| 
 | ||||
|         person = Person.objects.first() | ||||
|         person.age = 35 | ||||
|         person.save() | ||||
|         assert Person.objects.first().age == 35 | ||||
|         self.assertEqual(Person.objects.first().age, 35) | ||||
| 
 | ||||
|     def test_dynamic_embedded_works_with_only(self): | ||||
|         """Ensure custom fieldnames on a dynamic embedded document are found by qs.only()""" | ||||
| @@ -374,15 +327,10 @@ class TestDynamicDocument(MongoDBTestCase): | ||||
| 
 | ||||
|         Person.drop_collection() | ||||
| 
 | ||||
|         Person( | ||||
|             name="Eric", address=Address(city="San Francisco", street_number="1337") | ||||
|         ).save() | ||||
|         Person(name="Eric", address=Address(city="San Francisco", street_number="1337")).save() | ||||
| 
 | ||||
|         assert Person.objects.first().address.street_number == "1337" | ||||
|         assert ( | ||||
|             Person.objects.only("address__street_number").first().address.street_number | ||||
|             == "1337" | ||||
|         ) | ||||
|         self.assertEqual(Person.objects.first().address.street_number, '1337') | ||||
|         self.assertEqual(Person.objects.only('address__street_number').first().address.street_number, '1337') | ||||
| 
 | ||||
|     def test_dynamic_and_embedded_dict_access(self): | ||||
|         """Ensure embedded dynamic documents work with dict[] style access""" | ||||
| @@ -406,21 +354,20 @@ class TestDynamicDocument(MongoDBTestCase): | ||||
|         person["address"]["city"] = "Lundenne" | ||||
|         person.save() | ||||
| 
 | ||||
|         assert Person.objects.first().address.city == "Lundenne" | ||||
|         self.assertEqual(Person.objects.first().address.city, "Lundenne") | ||||
| 
 | ||||
|         assert Person.objects.first().phone == "555-1212" | ||||
|         self.assertEqual(Person.objects.first().phone, "555-1212") | ||||
| 
 | ||||
|         person = Person.objects.first() | ||||
|         person.address = Address(city="Londinium") | ||||
|         person.save() | ||||
| 
 | ||||
|         assert Person.objects.first().address.city == "Londinium" | ||||
|         self.assertEqual(Person.objects.first().address.city, "Londinium") | ||||
| 
 | ||||
|         person = Person.objects.first() | ||||
|         person["age"] = 35 | ||||
|         person.save() | ||||
|         assert Person.objects.first().age == 35 | ||||
|         self.assertEqual(Person.objects.first().age, 35) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
							
								
								
									
										1020
									
								
								tests/document/indexes.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1020
									
								
								tests/document/indexes.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										512
									
								
								tests/document/inheritance.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										512
									
								
								tests/document/inheritance.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,512 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
| import warnings | ||||
|  | ||||
| from datetime import datetime | ||||
|  | ||||
| from tests.fixtures import Base | ||||
|  | ||||
| from mongoengine import Document, EmbeddedDocument, connect | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.fields import (BooleanField, GenericReferenceField, | ||||
|                                 IntField, StringField) | ||||
|  | ||||
| __all__ = ('InheritanceTest', ) | ||||
|  | ||||
|  | ||||
| class InheritanceTest(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
|  | ||||
|     def tearDown(self): | ||||
|         for collection in self.db.collection_names(): | ||||
|             if 'system.' in collection: | ||||
|                 continue | ||||
|             self.db.drop_collection(collection) | ||||
|  | ||||
|     def test_superclasses(self): | ||||
|         """Ensure that the correct list of superclasses is assembled. | ||||
|         """ | ||||
|         class Animal(Document): | ||||
|             meta = {'allow_inheritance': True} | ||||
|         class Fish(Animal): pass | ||||
|         class Guppy(Fish): pass | ||||
|         class Mammal(Animal): pass | ||||
|         class Dog(Mammal): pass | ||||
|         class Human(Mammal): pass | ||||
|  | ||||
|         self.assertEqual(Animal._superclasses, ()) | ||||
|         self.assertEqual(Fish._superclasses, ('Animal',)) | ||||
|         self.assertEqual(Guppy._superclasses, ('Animal', 'Animal.Fish')) | ||||
|         self.assertEqual(Mammal._superclasses, ('Animal',)) | ||||
|         self.assertEqual(Dog._superclasses, ('Animal', 'Animal.Mammal')) | ||||
|         self.assertEqual(Human._superclasses, ('Animal', 'Animal.Mammal')) | ||||
|  | ||||
|     def test_external_superclasses(self): | ||||
|         """Ensure that the correct list of super classes is assembled when | ||||
|         importing part of the model. | ||||
|         """ | ||||
|         class Animal(Base): pass | ||||
|         class Fish(Animal): pass | ||||
|         class Guppy(Fish): pass | ||||
|         class Mammal(Animal): pass | ||||
|         class Dog(Mammal): pass | ||||
|         class Human(Mammal): pass | ||||
|  | ||||
|         self.assertEqual(Animal._superclasses, ('Base', )) | ||||
|         self.assertEqual(Fish._superclasses, ('Base', 'Base.Animal',)) | ||||
|         self.assertEqual(Guppy._superclasses, ('Base', 'Base.Animal', | ||||
|                                                'Base.Animal.Fish')) | ||||
|         self.assertEqual(Mammal._superclasses, ('Base', 'Base.Animal',)) | ||||
|         self.assertEqual(Dog._superclasses, ('Base', 'Base.Animal', | ||||
|                                              'Base.Animal.Mammal')) | ||||
|         self.assertEqual(Human._superclasses, ('Base', 'Base.Animal', | ||||
|                                                'Base.Animal.Mammal')) | ||||
|  | ||||
|     def test_subclasses(self): | ||||
|         """Ensure that the correct list of _subclasses (subclasses) is | ||||
|         assembled. | ||||
|         """ | ||||
|         class Animal(Document): | ||||
|             meta = {'allow_inheritance': True} | ||||
|         class Fish(Animal): pass | ||||
|         class Guppy(Fish): pass | ||||
|         class Mammal(Animal): pass | ||||
|         class Dog(Mammal): pass | ||||
|         class Human(Mammal): pass | ||||
|  | ||||
|         self.assertEqual(Animal._subclasses, ('Animal', | ||||
|                                          'Animal.Fish', | ||||
|                                          'Animal.Fish.Guppy', | ||||
|                                          'Animal.Mammal', | ||||
|                                          'Animal.Mammal.Dog', | ||||
|                                          'Animal.Mammal.Human')) | ||||
|         self.assertEqual(Fish._subclasses, ('Animal.Fish', | ||||
|                                        'Animal.Fish.Guppy',)) | ||||
|         self.assertEqual(Guppy._subclasses, ('Animal.Fish.Guppy',)) | ||||
|         self.assertEqual(Mammal._subclasses, ('Animal.Mammal', | ||||
|                                          'Animal.Mammal.Dog', | ||||
|                                          'Animal.Mammal.Human')) | ||||
|         self.assertEqual(Human._subclasses, ('Animal.Mammal.Human',)) | ||||
|  | ||||
|     def test_external_subclasses(self): | ||||
|         """Ensure that the correct list of _subclasses (subclasses) is | ||||
|         assembled when importing part of the model. | ||||
|         """ | ||||
|         class Animal(Base): pass | ||||
|         class Fish(Animal): pass | ||||
|         class Guppy(Fish): pass | ||||
|         class Mammal(Animal): pass | ||||
|         class Dog(Mammal): pass | ||||
|         class Human(Mammal): pass | ||||
|  | ||||
|         self.assertEqual(Animal._subclasses, ('Base.Animal', | ||||
|                                               'Base.Animal.Fish', | ||||
|                                               'Base.Animal.Fish.Guppy', | ||||
|                                               'Base.Animal.Mammal', | ||||
|                                               'Base.Animal.Mammal.Dog', | ||||
|                                               'Base.Animal.Mammal.Human')) | ||||
|         self.assertEqual(Fish._subclasses, ('Base.Animal.Fish', | ||||
|                                             'Base.Animal.Fish.Guppy',)) | ||||
|         self.assertEqual(Guppy._subclasses, ('Base.Animal.Fish.Guppy',)) | ||||
|         self.assertEqual(Mammal._subclasses, ('Base.Animal.Mammal', | ||||
|                                               'Base.Animal.Mammal.Dog', | ||||
|                                               'Base.Animal.Mammal.Human')) | ||||
|         self.assertEqual(Human._subclasses, ('Base.Animal.Mammal.Human',)) | ||||
|  | ||||
|     def test_dynamic_declarations(self): | ||||
|         """Test that declaring an extra class updates meta data""" | ||||
|  | ||||
|         class Animal(Document): | ||||
|             meta = {'allow_inheritance': True} | ||||
|  | ||||
|         self.assertEqual(Animal._superclasses, ()) | ||||
|         self.assertEqual(Animal._subclasses, ('Animal',)) | ||||
|  | ||||
|         # Test dynamically adding a class changes the meta data | ||||
|         class Fish(Animal): | ||||
|             pass | ||||
|  | ||||
|         self.assertEqual(Animal._superclasses, ()) | ||||
|         self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish')) | ||||
|  | ||||
|         self.assertEqual(Fish._superclasses, ('Animal', )) | ||||
|         self.assertEqual(Fish._subclasses, ('Animal.Fish',)) | ||||
|  | ||||
|         # Test dynamically adding an inherited class changes the meta data | ||||
|         class Pike(Fish): | ||||
|             pass | ||||
|  | ||||
|         self.assertEqual(Animal._superclasses, ()) | ||||
|         self.assertEqual(Animal._subclasses, ('Animal', 'Animal.Fish', | ||||
|                                               'Animal.Fish.Pike')) | ||||
|  | ||||
|         self.assertEqual(Fish._superclasses, ('Animal', )) | ||||
|         self.assertEqual(Fish._subclasses, ('Animal.Fish', 'Animal.Fish.Pike')) | ||||
|  | ||||
|         self.assertEqual(Pike._superclasses, ('Animal', 'Animal.Fish')) | ||||
|         self.assertEqual(Pike._subclasses, ('Animal.Fish.Pike',)) | ||||
|  | ||||
|     def test_inheritance_meta_data(self): | ||||
|         """Ensure that document may inherit fields from a superclass document. | ||||
|         """ | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             age = IntField() | ||||
|  | ||||
|             meta = {'allow_inheritance': True} | ||||
|  | ||||
|         class Employee(Person): | ||||
|             salary = IntField() | ||||
|  | ||||
|         self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'], | ||||
|                          sorted(Employee._fields.keys())) | ||||
|         self.assertEqual(Employee._get_collection_name(), | ||||
|                          Person._get_collection_name()) | ||||
|  | ||||
|     def test_inheritance_to_mongo_keys(self): | ||||
|         """Ensure that document may inherit fields from a superclass document. | ||||
|         """ | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             age = IntField() | ||||
|  | ||||
|             meta = {'allow_inheritance': True} | ||||
|  | ||||
|         class Employee(Person): | ||||
|             salary = IntField() | ||||
|  | ||||
|         self.assertEqual(['_cls', 'age', 'id', 'name', 'salary'], | ||||
|                          sorted(Employee._fields.keys())) | ||||
|         self.assertEqual(Person(name="Bob", age=35).to_mongo().keys(), | ||||
|                          ['_cls', 'name', 'age']) | ||||
|         self.assertEqual(Employee(name="Bob", age=35, salary=0).to_mongo().keys(), | ||||
|                          ['_cls', 'name', 'age', 'salary']) | ||||
|         self.assertEqual(Employee._get_collection_name(), | ||||
|                          Person._get_collection_name()) | ||||
|  | ||||
|     def test_indexes_and_multiple_inheritance(self): | ||||
|         """ Ensure that all of the indexes are created for a document with | ||||
|         multiple inheritance. | ||||
|         """ | ||||
|  | ||||
|         class A(Document): | ||||
|             a = StringField() | ||||
|  | ||||
|             meta = { | ||||
|                 'allow_inheritance': True, | ||||
|                 'indexes': ['a'] | ||||
|             } | ||||
|  | ||||
|         class B(Document): | ||||
|             b = StringField() | ||||
|  | ||||
|             meta = { | ||||
|                 'allow_inheritance': True, | ||||
|                 'indexes': ['b'] | ||||
|             } | ||||
|  | ||||
|         class C(A, B): | ||||
|             pass | ||||
|  | ||||
|         A.drop_collection() | ||||
|         B.drop_collection() | ||||
|         C.drop_collection() | ||||
|  | ||||
|         C.ensure_indexes() | ||||
|  | ||||
|         self.assertEqual( | ||||
|             sorted([idx['key'] for idx in C._get_collection().index_information().values()]), | ||||
|             sorted([[(u'_cls', 1), (u'b', 1)], [(u'_id', 1)], [(u'_cls', 1), (u'a', 1)]]) | ||||
|         ) | ||||
|  | ||||
|     def test_polymorphic_queries(self): | ||||
|         """Ensure that the correct subclasses are returned from a query | ||||
|         """ | ||||
|  | ||||
|         class Animal(Document): | ||||
|             meta = {'allow_inheritance': True} | ||||
|         class Fish(Animal): pass | ||||
|         class Mammal(Animal): pass | ||||
|         class Dog(Mammal): pass | ||||
|         class Human(Mammal): pass | ||||
|  | ||||
|         Animal.drop_collection() | ||||
|  | ||||
|         Animal().save() | ||||
|         Fish().save() | ||||
|         Mammal().save() | ||||
|         Dog().save() | ||||
|         Human().save() | ||||
|  | ||||
|         classes = [obj.__class__ for obj in Animal.objects] | ||||
|         self.assertEqual(classes, [Animal, Fish, Mammal, Dog, Human]) | ||||
|  | ||||
|         classes = [obj.__class__ for obj in Mammal.objects] | ||||
|         self.assertEqual(classes, [Mammal, Dog, Human]) | ||||
|  | ||||
|         classes = [obj.__class__ for obj in Human.objects] | ||||
|         self.assertEqual(classes, [Human]) | ||||
|  | ||||
|     def test_allow_inheritance(self): | ||||
|         """Ensure that inheritance is disabled by default on simple | ||||
|         classes and that _cls will not be used. | ||||
|         """ | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         # can't inherit because Animal didn't explicitly allow inheritance | ||||
|         with self.assertRaises(ValueError): | ||||
|             class Dog(Animal): | ||||
|                 pass | ||||
|  | ||||
|         # Check that _cls etc aren't present on simple documents | ||||
|         dog = Animal(name='dog').save() | ||||
|         self.assertEqual(dog.to_mongo().keys(), ['_id', 'name']) | ||||
|  | ||||
|         collection = self.db[Animal._get_collection_name()] | ||||
|         obj = collection.find_one() | ||||
|         self.assertFalse('_cls' in obj) | ||||
|  | ||||
|     def test_cant_turn_off_inheritance_on_subclass(self): | ||||
|         """Ensure if inheritance is on in a subclass you cant turn it off. | ||||
|         """ | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             meta = {'allow_inheritance': True} | ||||
|  | ||||
|         with self.assertRaises(ValueError): | ||||
|             class Mammal(Animal): | ||||
|                 meta = {'allow_inheritance': False} | ||||
|  | ||||
|     def test_allow_inheritance_abstract_document(self): | ||||
|         """Ensure that abstract documents can set inheritance rules and that | ||||
|         _cls will not be used. | ||||
|         """ | ||||
|         class FinalDocument(Document): | ||||
|             meta = {'abstract': True, | ||||
|                     'allow_inheritance': False} | ||||
|  | ||||
|         class Animal(FinalDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         with self.assertRaises(ValueError): | ||||
|             class Mammal(Animal): | ||||
|                 pass | ||||
|  | ||||
|         # Check that _cls isn't present in simple documents | ||||
|         doc = Animal(name='dog') | ||||
|         self.assertFalse('_cls' in doc.to_mongo()) | ||||
|  | ||||
|     def test_abstract_handle_ids_in_metaclass_properly(self): | ||||
|  | ||||
|         class City(Document): | ||||
|             continent = StringField() | ||||
|             meta = {'abstract': True, | ||||
|                     'allow_inheritance': False} | ||||
|  | ||||
|         class EuropeanCity(City): | ||||
|             name = StringField() | ||||
|  | ||||
|         berlin = EuropeanCity(name='Berlin', continent='Europe') | ||||
|         self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) | ||||
|         self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) | ||||
|         self.assertEqual(len(berlin._fields_ordered), 3) | ||||
|         self.assertEqual(berlin._fields_ordered[0], 'id') | ||||
|  | ||||
|     def test_auto_id_not_set_if_specific_in_parent_class(self): | ||||
|  | ||||
|         class City(Document): | ||||
|             continent = StringField() | ||||
|             city_id = IntField(primary_key=True) | ||||
|             meta = {'abstract': True, | ||||
|                     'allow_inheritance': False} | ||||
|  | ||||
|         class EuropeanCity(City): | ||||
|             name = StringField() | ||||
|  | ||||
|         berlin = EuropeanCity(name='Berlin', continent='Europe') | ||||
|         self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) | ||||
|         self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) | ||||
|         self.assertEqual(len(berlin._fields_ordered), 3) | ||||
|         self.assertEqual(berlin._fields_ordered[0], 'city_id') | ||||
|  | ||||
|     def test_auto_id_vs_non_pk_id_field(self): | ||||
|  | ||||
|         class City(Document): | ||||
|             continent = StringField() | ||||
|             id = IntField() | ||||
|             meta = {'abstract': True, | ||||
|                     'allow_inheritance': False} | ||||
|  | ||||
|         class EuropeanCity(City): | ||||
|             name = StringField() | ||||
|  | ||||
|         berlin = EuropeanCity(name='Berlin', continent='Europe') | ||||
|         self.assertEqual(len(berlin._db_field_map), len(berlin._fields_ordered)) | ||||
|         self.assertEqual(len(berlin._reverse_db_field_map), len(berlin._fields_ordered)) | ||||
|         self.assertEqual(len(berlin._fields_ordered), 4) | ||||
|         self.assertEqual(berlin._fields_ordered[0], 'auto_id_0') | ||||
|         berlin.save() | ||||
|         self.assertEqual(berlin.pk, berlin.auto_id_0) | ||||
|  | ||||
|     def test_abstract_document_creation_does_not_fail(self): | ||||
|         class City(Document): | ||||
|             continent = StringField() | ||||
|             meta = {'abstract': True, | ||||
|                     'allow_inheritance': False} | ||||
|  | ||||
|         bkk = City(continent='asia') | ||||
|         self.assertEqual(None, bkk.pk) | ||||
|         # TODO: expected error? Shouldn't we create a new error type? | ||||
|         with self.assertRaises(KeyError): | ||||
|             setattr(bkk, 'pk', 1) | ||||
|  | ||||
|     def test_allow_inheritance_embedded_document(self): | ||||
|         """Ensure embedded documents respect inheritance.""" | ||||
|         class Comment(EmbeddedDocument): | ||||
|             content = StringField() | ||||
|  | ||||
|         with self.assertRaises(ValueError): | ||||
|             class SpecialComment(Comment): | ||||
|                 pass | ||||
|  | ||||
|         doc = Comment(content='test') | ||||
|         self.assertFalse('_cls' in doc.to_mongo()) | ||||
|  | ||||
|         class Comment(EmbeddedDocument): | ||||
|             content = StringField() | ||||
|             meta = {'allow_inheritance': True} | ||||
|  | ||||
|         doc = Comment(content='test') | ||||
|         self.assertTrue('_cls' in doc.to_mongo()) | ||||
|  | ||||
|     def test_document_inheritance(self): | ||||
|         """Ensure mutliple inheritance of abstract documents | ||||
|         """ | ||||
|         class DateCreatedDocument(Document): | ||||
|             meta = { | ||||
|                 'allow_inheritance': True, | ||||
|                 'abstract': True, | ||||
|             } | ||||
|  | ||||
|         class DateUpdatedDocument(Document): | ||||
|             meta = { | ||||
|                 'allow_inheritance': True, | ||||
|                 'abstract': True, | ||||
|             } | ||||
|  | ||||
|         try: | ||||
|             class MyDocument(DateCreatedDocument, DateUpdatedDocument): | ||||
|                 pass | ||||
|         except Exception: | ||||
|             self.assertTrue(False, "Couldn't create MyDocument class") | ||||
|  | ||||
|     def test_abstract_documents(self): | ||||
|         """Ensure that a document superclass can be marked as abstract | ||||
|         thereby not using it as the name for the collection.""" | ||||
|  | ||||
|         defaults = {'index_background': True, | ||||
|                     'index_drop_dups': True, | ||||
|                     'index_opts': {'hello': 'world'}, | ||||
|                     'allow_inheritance': True, | ||||
|                     'queryset_class': 'QuerySet', | ||||
|                     'db_alias': 'myDB', | ||||
|                     'shard_key': ('hello', 'world')} | ||||
|  | ||||
|         meta_settings = {'abstract': True} | ||||
|         meta_settings.update(defaults) | ||||
|  | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             meta = meta_settings | ||||
|  | ||||
|         class Fish(Animal): pass | ||||
|         class Guppy(Fish): pass | ||||
|  | ||||
|         class Mammal(Animal): | ||||
|             meta = {'abstract': True} | ||||
|         class Human(Mammal): pass | ||||
|  | ||||
|         for k, v in defaults.iteritems(): | ||||
|             for cls in [Animal, Fish, Guppy]: | ||||
|                 self.assertEqual(cls._meta[k], v) | ||||
|  | ||||
|         self.assertFalse('collection' in Animal._meta) | ||||
|         self.assertFalse('collection' in Mammal._meta) | ||||
|  | ||||
|         self.assertEqual(Animal._get_collection_name(), None) | ||||
|         self.assertEqual(Mammal._get_collection_name(), None) | ||||
|  | ||||
|         self.assertEqual(Fish._get_collection_name(), 'fish') | ||||
|         self.assertEqual(Guppy._get_collection_name(), 'fish') | ||||
|         self.assertEqual(Human._get_collection_name(), 'human') | ||||
|  | ||||
|         # ensure that a subclass of a non-abstract class can't be abstract | ||||
|         with self.assertRaises(ValueError): | ||||
|             class EvilHuman(Human): | ||||
|                 evil = BooleanField(default=True) | ||||
|                 meta = {'abstract': True} | ||||
|  | ||||
|     def test_abstract_embedded_documents(self): | ||||
|         # 789: EmbeddedDocument shouldn't inherit abstract | ||||
|         class A(EmbeddedDocument): | ||||
|             meta = {"abstract": True} | ||||
|  | ||||
|         class B(A): | ||||
|             pass | ||||
|  | ||||
|         self.assertFalse(B._meta["abstract"]) | ||||
|  | ||||
|     def test_inherited_collections(self): | ||||
|         """Ensure that subclassed documents don't override parents' | ||||
|         collections | ||||
|         """ | ||||
|  | ||||
|         class Drink(Document): | ||||
|             name = StringField() | ||||
|             meta = {'allow_inheritance': True} | ||||
|  | ||||
|         class Drinker(Document): | ||||
|             drink = GenericReferenceField() | ||||
|  | ||||
|         try: | ||||
|             warnings.simplefilter("error") | ||||
|  | ||||
|             class AcloholicDrink(Drink): | ||||
|                 meta = {'collection': 'booze'} | ||||
|  | ||||
|         except SyntaxWarning: | ||||
|             warnings.simplefilter("ignore") | ||||
|  | ||||
|             class AlcoholicDrink(Drink): | ||||
|                 meta = {'collection': 'booze'} | ||||
|  | ||||
|         else: | ||||
|             raise AssertionError("SyntaxWarning should be triggered") | ||||
|  | ||||
|         warnings.resetwarnings() | ||||
|  | ||||
|         Drink.drop_collection() | ||||
|         AlcoholicDrink.drop_collection() | ||||
|         Drinker.drop_collection() | ||||
|  | ||||
|         red_bull = Drink(name='Red Bull') | ||||
|         red_bull.save() | ||||
|  | ||||
|         programmer = Drinker(drink=red_bull) | ||||
|         programmer.save() | ||||
|  | ||||
|         beer = AlcoholicDrink(name='Beer') | ||||
|         beer.save() | ||||
|         real_person = Drinker(drink=beer) | ||||
|         real_person.save() | ||||
|  | ||||
|         self.assertEqual(Drinker.objects[0].drink.name, red_bull.name) | ||||
|         self.assertEqual(Drinker.objects[1].drink.name, beer.name) | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,14 +1,22 @@ | ||||
| import unittest | ||||
| import uuid | ||||
| 
 | ||||
| from nose.plugins.skip import SkipTest | ||||
| from datetime import datetime | ||||
| from bson import ObjectId | ||||
| 
 | ||||
| import pymongo | ||||
| 
 | ||||
| from mongoengine import * | ||||
| from tests.utils import MongoDBTestCase | ||||
| 
 | ||||
| __all__ = ("TestJson",) | ||||
| 
 | ||||
| 
 | ||||
| class TestJson(MongoDBTestCase): | ||||
| class TestJson(unittest.TestCase): | ||||
| 
 | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
| 
 | ||||
|     def test_json_names(self): | ||||
|         """ | ||||
|         Going to test reported issue: | ||||
| @@ -17,24 +25,22 @@ class TestJson(MongoDBTestCase): | ||||
|         a to_json with the original class names and not the abreviated | ||||
|         mongodb document keys | ||||
|         """ | ||||
| 
 | ||||
|         class Embedded(EmbeddedDocument): | ||||
|             string = StringField(db_field="s") | ||||
|             string = StringField(db_field='s') | ||||
| 
 | ||||
|         class Doc(Document): | ||||
|             string = StringField(db_field="s") | ||||
|             embedded = EmbeddedDocumentField(Embedded, db_field="e") | ||||
|             string = StringField(db_field='s') | ||||
|             embedded = EmbeddedDocumentField(Embedded, db_field='e') | ||||
| 
 | ||||
|         doc = Doc(string="Hello", embedded=Embedded(string="Inner Hello")) | ||||
|         doc_json = doc.to_json( | ||||
|             sort_keys=True, use_db_field=False, separators=(",", ":") | ||||
|         ) | ||||
|         doc = Doc( string="Hello", embedded=Embedded(string="Inner Hello")) | ||||
|         doc_json = doc.to_json(sort_keys=True, use_db_field=False,separators=(',', ':')) | ||||
| 
 | ||||
|         expected_json = """{"embedded":{"string":"Inner Hello"},"string":"Hello"}""" | ||||
| 
 | ||||
|         assert doc_json == expected_json | ||||
|         self.assertEqual( doc_json, expected_json) | ||||
| 
 | ||||
|     def test_json_simple(self): | ||||
| 
 | ||||
|         class Embedded(EmbeddedDocument): | ||||
|             string = StringField() | ||||
| 
 | ||||
| @@ -43,20 +49,22 @@ class TestJson(MongoDBTestCase): | ||||
|             embedded_field = EmbeddedDocumentField(Embedded) | ||||
| 
 | ||||
|             def __eq__(self, other): | ||||
|                 return ( | ||||
|                     self.string == other.string | ||||
|                     and self.embedded_field == other.embedded_field | ||||
|                 ) | ||||
|                 return (self.string == other.string and | ||||
|                         self.embedded_field == other.embedded_field) | ||||
| 
 | ||||
|         doc = Doc(string="Hi", embedded_field=Embedded(string="Hi")) | ||||
| 
 | ||||
|         doc_json = doc.to_json(sort_keys=True, separators=(",", ":")) | ||||
|         doc_json = doc.to_json(sort_keys=True, separators=(',', ':')) | ||||
|         expected_json = """{"embedded_field":{"string":"Hi"},"string":"Hi"}""" | ||||
|         assert doc_json == expected_json | ||||
|         self.assertEqual(doc_json, expected_json) | ||||
| 
 | ||||
|         assert doc == Doc.from_json(doc.to_json()) | ||||
|         self.assertEqual(doc, Doc.from_json(doc.to_json())) | ||||
| 
 | ||||
|     def test_json_complex(self): | ||||
| 
 | ||||
|         if pymongo.version_tuple[0] <= 2 and pymongo.version_tuple[1] <= 3: | ||||
|             raise SkipTest("Need pymongo 2.4 as has a fix for DBRefs") | ||||
| 
 | ||||
|         class EmbeddedDoc(EmbeddedDocument): | ||||
|             pass | ||||
| 
 | ||||
| @@ -64,43 +72,41 @@ class TestJson(MongoDBTestCase): | ||||
|             pass | ||||
| 
 | ||||
|         class Doc(Document): | ||||
|             string_field = StringField(default="1") | ||||
|             string_field = StringField(default='1') | ||||
|             int_field = IntField(default=1) | ||||
|             float_field = FloatField(default=1.1) | ||||
|             boolean_field = BooleanField(default=True) | ||||
|             datetime_field = DateTimeField(default=datetime.now) | ||||
|             embedded_document_field = EmbeddedDocumentField( | ||||
|                 EmbeddedDoc, default=lambda: EmbeddedDoc() | ||||
|             ) | ||||
|             embedded_document_field = EmbeddedDocumentField(EmbeddedDoc, | ||||
|                                         default=lambda: EmbeddedDoc()) | ||||
|             list_field = ListField(default=lambda: [1, 2, 3]) | ||||
|             dict_field = DictField(default=lambda: {"hello": "world"}) | ||||
|             objectid_field = ObjectIdField(default=ObjectId) | ||||
|             reference_field = ReferenceField(Simple, default=lambda: Simple().save()) | ||||
|             reference_field = ReferenceField(Simple, default=lambda: | ||||
|                                                         Simple().save()) | ||||
|             map_field = MapField(IntField(), default=lambda: {"simple": 1}) | ||||
|             decimal_field = DecimalField(default=1.0) | ||||
|             complex_datetime_field = ComplexDateTimeField(default=datetime.now) | ||||
|             url_field = URLField(default="http://mongoengine.org") | ||||
|             dynamic_field = DynamicField(default=1) | ||||
|             generic_reference_field = GenericReferenceField( | ||||
|                 default=lambda: Simple().save() | ||||
|             ) | ||||
|             sorted_list_field = SortedListField(IntField(), default=lambda: [1, 2, 3]) | ||||
|                                             default=lambda: Simple().save()) | ||||
|             sorted_list_field = SortedListField(IntField(), | ||||
|                                                 default=lambda: [1, 2, 3]) | ||||
|             email_field = EmailField(default="ross@example.com") | ||||
|             geo_point_field = GeoPointField(default=lambda: [1, 2]) | ||||
|             sequence_field = SequenceField() | ||||
|             uuid_field = UUIDField(default=uuid.uuid4) | ||||
|             generic_embedded_document_field = GenericEmbeddedDocumentField( | ||||
|                 default=lambda: EmbeddedDoc() | ||||
|             ) | ||||
|                                         default=lambda: EmbeddedDoc()) | ||||
| 
 | ||||
|             def __eq__(self, other): | ||||
|                 import json | ||||
| 
 | ||||
|                 return json.loads(self.to_json()) == json.loads(other.to_json()) | ||||
| 
 | ||||
|         doc = Doc() | ||||
|         assert doc == Doc.from_json(doc.to_json()) | ||||
|         self.assertEqual(doc, Doc.from_json(doc.to_json())) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
| @@ -1,357 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.pymongo_support import list_collection_names | ||||
| from mongoengine.queryset import NULLIFY, PULL | ||||
|  | ||||
|  | ||||
class TestClassMethods(unittest.TestCase):
    """Exercise Document class-level helpers: db/collection access,
    index comparison and listing, delete-rule registration, and the
    collection naming rules (meta['collection'], abstract bases, mixins).
    """

    def setUp(self):
        # Shared connection plus a simple inheritable Person document.
        connect(db="mongoenginetest")
        self.db = get_db()

        class Person(Document):
            name = StringField()
            age = IntField()

            # Plain class attribute — must NOT be picked up as a field.
            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        # Drop every collection so tests stay independent of each other.
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)

    def test_definition(self):
        """Ensure that document may be defined using fields.
        """
        # _cls and id are added implicitly (inheritance + primary key).
        assert ["_cls", "age", "id", "name"] == sorted(self.Person._fields.keys())
        assert ["IntField", "ObjectIdField", "StringField", "StringField"] == sorted(
            [x.__class__.__name__ for x in self.Person._fields.values()]
        )

    def test_get_db(self):
        """Ensure that get_db returns the expected db.
        """
        db = self.Person._get_db()
        assert self.db == db

    def test_get_collection_name(self):
        """Ensure that get_collection_name returns the expected collection
        name.
        """
        collection_name = "person"
        assert collection_name == self.Person._get_collection_name()

    def test_get_collection(self):
        """Ensure that get_collection returns the expected collection.
        """
        collection_name = "person"
        collection = self.Person._get_collection()
        assert self.db[collection_name] == collection

    def test_drop_collection(self):
        """Ensure that the collection may be dropped from the database.
        """
        collection_name = "person"
        # Saving lazily creates the collection.
        self.Person(name="Test").save()
        assert collection_name in list_collection_names(self.db)

        self.Person.drop_collection()
        assert collection_name not in list_collection_names(self.db)

    def test_register_delete_rule(self):
        """Ensure that register delete rule adds a delete rule to the document
        meta.
        """

        class Job(Document):
            employee = ReferenceField(self.Person)

        assert self.Person._meta.get("delete_rules") is None

        self.Person.register_delete_rule(Job, "employee", NULLIFY)
        assert self.Person._meta["delete_rules"] == {(Job, "employee"): NULLIFY}

    def test_compare_indexes(self):
        """ Ensure that the indexes are properly created and that
        compare_indexes identifies the missing/extra indexes
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()
            tags = StringField()

            meta = {"indexes": [("author", "title")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        # An index created outside of meta shows up as "extra".
        BlogPost.ensure_index(["author", "description"])
        assert BlogPost.compare_indexes() == {
            "missing": [],
            "extra": [[("author", 1), ("description", 1)]],
        }

        BlogPost._get_collection().drop_index("author_1_description_1")
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        # Dropping a meta-declared index makes it show up as "missing".
        BlogPost._get_collection().drop_index("author_1_title_1")
        assert BlogPost.compare_indexes() == {
            "missing": [[("author", 1), ("title", 1)]],
            "extra": [],
        }

    def test_compare_indexes_inheritance(self):
        """ Ensure that the indexes are properly created and that
        compare_indexes identifies the missing/extra indexes for subclassed
        documents (_cls included)
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {"indexes": [("author", "tags")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        # Subclass indexes are prefixed with _cls automatically.
        BlogPostWithTags.ensure_index(["author", "tag_list"])
        assert BlogPost.compare_indexes() == {
            "missing": [],
            "extra": [[("_cls", 1), ("author", 1), ("tag_list", 1)]],
        }

        BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tag_list_1")
        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}

        BlogPostWithTags._get_collection().drop_index("_cls_1_author_1_tags_1")
        assert BlogPost.compare_indexes() == {
            "missing": [[("_cls", 1), ("author", 1), ("tags", 1)]],
            "extra": [],
        }

    def test_compare_indexes_multiple_subclasses(self):
        """ Ensure that compare_indexes behaves correctly if called from a
        class, which base class has multiple subclasses
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()
            tag_list = ListField(StringField())

            meta = {"indexes": [("author", "tags")]}

        class BlogPostWithCustomField(BlogPost):
            custom = DictField()

            meta = {"indexes": [("author", "custom")]}

        # NOTE(review): unlike the sibling tests there is no drop_collection()
        # here before ensure_indexes — relies on tearDown having cleaned up.
        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithCustomField.ensure_indexes()

        assert BlogPost.compare_indexes() == {"missing": [], "extra": []}
        assert BlogPostWithTags.compare_indexes() == {"missing": [], "extra": []}
        assert BlogPostWithCustomField.compare_indexes() == {"missing": [], "extra": []}

    def test_compare_indexes_for_text_indexes(self):
        """ Ensure that compare_indexes behaves correctly for text indexes """

        class Doc(Document):
            a = StringField()
            b = StringField()
            meta = {
                "indexes": [
                    {
                        "fields": ["$a", "$b"],  # $-prefix declares a text index
                        "default_language": "english",
                        "weights": {"a": 10, "b": 2},
                    }
                ]
            }

        Doc.drop_collection()
        Doc.ensure_indexes()
        actual = Doc.compare_indexes()
        expected = {"missing": [], "extra": []}
        assert actual == expected

    def test_list_indexes_inheritance(self):
        """ ensure that all of the indexes are listed regardless of the super-
        or sub-class that we call it from
        """

        class BlogPost(Document):
            author = StringField()
            title = StringField()
            description = StringField()

            meta = {"allow_inheritance": True}

        class BlogPostWithTags(BlogPost):
            tags = StringField()

            meta = {"indexes": [("author", "tags")]}

        class BlogPostWithTagsAndExtraText(BlogPostWithTags):
            extra_text = StringField()

            meta = {"indexes": [("author", "tags", "extra_text")]}

        BlogPost.drop_collection()

        BlogPost.ensure_indexes()
        BlogPostWithTags.ensure_indexes()
        BlogPostWithTagsAndExtraText.ensure_indexes()

        # list_indexes() is identical no matter which class in the
        # hierarchy it is called from.
        assert BlogPost.list_indexes() == BlogPostWithTags.list_indexes()
        assert BlogPost.list_indexes() == BlogPostWithTagsAndExtraText.list_indexes()
        assert BlogPost.list_indexes() == [
            [("_cls", 1), ("author", 1), ("tags", 1)],
            [("_cls", 1), ("author", 1), ("tags", 1), ("extra_text", 1)],
            [(u"_id", 1)],
            [("_cls", 1)],
        ]

    def test_register_delete_rule_inherited(self):
        # Delete rules declared via reverse_delete_rule propagate to every
        # subclass of the referencing document.
        class Vaccine(Document):
            name = StringField(required=True)

            meta = {"indexes": ["name"]}

        class Animal(Document):
            family = StringField(required=True)
            vaccine_made = ListField(
                ReferenceField("Vaccine", reverse_delete_rule=PULL)
            )

            meta = {"allow_inheritance": True, "indexes": ["family"]}

        class Cat(Animal):
            name = StringField(required=True)

        assert Vaccine._meta["delete_rules"][(Animal, "vaccine_made")] == PULL
        assert Vaccine._meta["delete_rules"][(Cat, "vaccine_made")] == PULL

    def test_collection_naming(self):
        """Ensure that a collection with a specified name may be used.
        """

        # Default: snake_cased class name.
        class DefaultNamingTest(Document):
            pass

        assert "default_naming_test" == DefaultNamingTest._get_collection_name()

        # Explicit string name.
        class CustomNamingTest(Document):
            meta = {"collection": "pimp_my_collection"}

        assert "pimp_my_collection" == CustomNamingTest._get_collection_name()

        # Callable name — receives the class, returns the name.
        class DynamicNamingTest(Document):
            meta = {"collection": lambda c: "DYNAMO"}

        assert "DYNAMO" == DynamicNamingTest._get_collection_name()

        # Use Abstract class to handle backwards compatibility
        class BaseDocument(Document):
            meta = {"abstract": True, "collection": lambda c: c.__name__.lower()}

        class OldNamingConvention(BaseDocument):
            pass

        assert "oldnamingconvention" == OldNamingConvention._get_collection_name()

        class InheritedAbstractNamingTest(BaseDocument):
            meta = {"collection": "wibble"}

        assert "wibble" == InheritedAbstractNamingTest._get_collection_name()

        # Mixin tests
        class BaseMixin(object):
            meta = {"collection": lambda c: c.__name__.lower()}

        class OldMixinNamingConvention(Document, BaseMixin):
            pass

        assert (
            "oldmixinnamingconvention"
            == OldMixinNamingConvention._get_collection_name()
        )

        class BaseMixin(object):
            meta = {"collection": lambda c: c.__name__.lower()}

        class BaseDocument(Document, BaseMixin):
            meta = {"allow_inheritance": True}

        class MyDocument(BaseDocument):
            pass

        # Non-abstract base: subclasses share the base's collection.
        assert "basedocument" == MyDocument._get_collection_name()

    def test_custom_collection_name_operations(self):
        """Ensure that a collection with a specified name is used as expected.
        """
        collection_name = "personCollTest"

        class Person(Document):
            name = StringField()
            meta = {"collection": collection_name}

        Person(name="Test User").save()
        assert collection_name in list_collection_names(self.db)

        # Raw pymongo access and the ODM see the same document.
        user_obj = self.db[collection_name].find_one()
        assert user_obj["name"] == "Test User"

        user_obj = Person.objects[0]
        assert user_obj.name == "Test User"

        Person.drop_collection()
        assert collection_name not in list_collection_names(self.db)

    def test_collection_name_and_primary(self):
        """Ensure that a collection with a specified name may be used.
        """

        class Person(Document):
            name = StringField(primary_key=True)
            meta = {"collection": "app"}

        Person(name="Test User").save()

        user_obj = Person.objects.first()
        assert user_obj.name == "Test User"

        Person.drop_collection()
|  | ||||
|  | ||||
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| @@ -1,916 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
|  | ||||
| from bson import SON | ||||
| from mongoengine import * | ||||
| from mongoengine.pymongo_support import list_collection_names | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class TestDelta(MongoDBTestCase):
    """Tests for the dirty-tracking machinery: _get_changed_fields() and
    _delta() (the ($set, $unset) pair used to build partial updates)."""

    def setUp(self):
        super(TestDelta, self).setUp()

        class Person(Document):
            name = StringField()
            age = IntField()

            # Plain class attribute — must NOT be picked up as a field.
            non_field = True

            meta = {"allow_inheritance": True}

        self.Person = Person

    def tearDown(self):
        # Drop all collections so each test starts from a clean database.
        for collection in list_collection_names(self.db):
            self.db.drop_collection(collection)
|  | ||||
|     def test_delta(self): | ||||
|         self.delta(Document) | ||||
|         self.delta(DynamicDocument) | ||||
|  | ||||
    def delta(self, DocClass):
        """Shared body for test_delta: verify _get_changed_fields()/_delta()
        for scalar, dict and list fields on the given Document base class."""

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # A freshly loaded document has no pending changes.
        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["string_field"]
        assert doc._delta() == ({"string_field": "hello"}, {})

        # _changed_fields is reset manually between cases so each field's
        # delta can be observed in isolation.
        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["int_field"]
        assert doc._delta() == ({"int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({"dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({"list_field": list_value}, {})

        # Test unsetting: emptying a container produces a $unset, not a $set.
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({}, {"dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({}, {"list_field": 1})
|  | ||||
|     def test_delta_recursive(self): | ||||
|         self.delta_recursive(Document, EmbeddedDocument) | ||||
|         self.delta_recursive(DynamicDocument, EmbeddedDocument) | ||||
|         self.delta_recursive(Document, DynamicEmbeddedDocument) | ||||
|         self.delta_recursive(DynamicDocument, DynamicEmbeddedDocument) | ||||
|  | ||||
    def delta_recursive(self, DocClass, EmbeddedClass):
        """Shared body for test_delta_recursive: verify that change tracking
        and deltas use dotted paths through embedded documents, lists and
        dicts. The save/reload sequence is order-sensitive — each block
        depends on the state persisted by the previous one."""

        class Embedded(EmbeddedClass):
            id = StringField()
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()

        class Doc(DocClass):
            string_field = StringField()
            int_field = IntField()
            dict_field = DictField()
            list_field = ListField()
            embedded_field = EmbeddedDocumentField(Embedded)

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        embedded_1 = Embedded()
        embedded_1.id = "010101"
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {"hello": "world"}
        embedded_1.list_field = ["1", 2, {"hello": "world"}]
        doc.embedded_field = embedded_1

        # Assigning a whole embedded doc marks only the top-level field.
        assert doc._get_changed_fields() == ["embedded_field"]

        embedded_delta = {
            "id": "010101",
            "string_field": "hello",
            "int_field": 1,
            "dict_field": {"hello": "world"},
            "list_field": ["1", 2, {"hello": "world"}],
        }
        assert doc.embedded_field._delta() == (embedded_delta, {})
        assert doc._delta() == ({"embedded_field": embedded_delta}, {})

        doc.save()
        doc = doc.reload(10)

        # Emptying a nested container unsets the dotted path.
        doc.embedded_field.dict_field = {}
        assert doc._get_changed_fields() == ["embedded_field.dict_field"]
        assert doc.embedded_field._delta() == ({}, {"dict_field": 1})
        assert doc._delta() == ({}, {"embedded_field.dict_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.dict_field == {}

        doc.embedded_field.list_field = []
        assert doc._get_changed_fields() == ["embedded_field.list_field"]
        assert doc.embedded_field._delta() == ({}, {"list_field": 1})
        assert doc._delta() == ({}, {"embedded_field.list_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field == []

        embedded_2 = Embedded()
        embedded_2.string_field = "hello"
        embedded_2.int_field = 1
        embedded_2.dict_field = {"hello": "world"}
        embedded_2.list_field = ["1", 2, {"hello": "world"}]

        # A list mixing scalars and an embedded document serializes the
        # embedded document with its _cls marker.
        doc.embedded_field.list_field = ["1", 2, embedded_2]
        assert doc._get_changed_fields() == ["embedded_field.list_field"]

        assert doc.embedded_field._delta() == (
            {
                "list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "string_field": "hello",
                        "dict_field": {"hello": "world"},
                        "int_field": 1,
                        "list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )

        assert doc._delta() == (
            {
                "embedded_field.list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "string_field": "hello",
                        "dict_field": {"hello": "world"},
                        "int_field": 1,
                        "list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)

        assert doc.embedded_field.list_field[0] == "1"
        assert doc.embedded_field.list_field[1] == 2
        for k in doc.embedded_field.list_field[2]._fields:
            assert doc.embedded_field.list_field[2][k] == embedded_2[k]

        # Mutating a field inside a list element yields an indexed path.
        doc.embedded_field.list_field[2].string_field = "world"
        assert doc._get_changed_fields() == ["embedded_field.list_field.2.string_field"]
        assert doc.embedded_field._delta() == (
            {"list_field.2.string_field": "world"},
            {},
        )
        assert doc._delta() == (
            {"embedded_field.list_field.2.string_field": "world"},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "world"

        # Test multiple assignments
        # Re-assigning the element collapses the delta to the whole element.
        doc.embedded_field.list_field[2].string_field = "hello world"
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        assert doc._get_changed_fields() == ["embedded_field.list_field.2"]
        assert doc.embedded_field._delta() == (
            {
                "list_field.2": {
                    "_cls": "Embedded",
                    "string_field": "hello world",
                    "int_field": 1,
                    "list_field": ["1", 2, {"hello": "world"}],
                    "dict_field": {"hello": "world"},
                }
            },
            {},
        )
        assert doc._delta() == (
            {
                "embedded_field.list_field.2": {
                    "_cls": "Embedded",
                    "string_field": "hello world",
                    "int_field": 1,
                    "list_field": ["1", 2, {"hello": "world"}],
                    "dict_field": {"hello": "world"},
                }
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "hello world"

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        assert doc._delta() == (
            {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}]},
            {},
        )
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        assert doc._delta() == (
            {"embedded_field.list_field.2.list_field": [2, {"hello": "world"}, 1]},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]

        # Deleting a dict key / attribute produces a $unset on that path.
        del doc.embedded_field.list_field[2].list_field[2]["hello"]
        assert doc._delta() == (
            {},
            {"embedded_field.list_field.2.list_field.2.hello": 1},
        )
        doc.save()
        doc = doc.reload(10)

        del doc.embedded_field.list_field[2].list_field
        assert doc._delta() == ({}, {"embedded_field.list_field.2.list_field": 1})

        doc.save()
        doc = doc.reload(10)

        doc.dict_field["Embedded"] = embedded_1
        doc.save()
        doc = doc.reload(10)

        # Changes inside documents stored in a DictField also use dotted keys.
        doc.dict_field["Embedded"].string_field = "Hello World"
        assert doc._get_changed_fields() == ["dict_field.Embedded.string_field"]
        assert doc._delta() == ({"dict_field.Embedded.string_field": "Hello World"}, {})
|  | ||||
|     def test_circular_reference_deltas(self): | ||||
|         self.circular_reference_deltas(Document, Document) | ||||
|         self.circular_reference_deltas(Document, DynamicDocument) | ||||
|         self.circular_reference_deltas(DynamicDocument, Document) | ||||
|         self.circular_reference_deltas(DynamicDocument, DynamicDocument) | ||||
|  | ||||
|     def circular_reference_deltas(self, DocClass1, DocClass2): | ||||
|         class Person(DocClass1): | ||||
|             name = StringField() | ||||
|             owns = ListField(ReferenceField("Organization")) | ||||
|  | ||||
|         class Organization(DocClass2): | ||||
|             name = StringField() | ||||
|             owner = ReferenceField("Person") | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         Organization.drop_collection() | ||||
|  | ||||
|         person = Person(name="owner").save() | ||||
|         organization = Organization(name="company").save() | ||||
|  | ||||
|         person.owns.append(organization) | ||||
|         organization.owner = person | ||||
|  | ||||
|         person.save() | ||||
|         organization.save() | ||||
|  | ||||
|         p = Person.objects[0].select_related() | ||||
|         o = Organization.objects.first() | ||||
|         assert p.owns[0] == o | ||||
|         assert o.owner == p | ||||
|  | ||||
|     def test_circular_reference_deltas_2(self): | ||||
|         self.circular_reference_deltas_2(Document, Document) | ||||
|         self.circular_reference_deltas_2(Document, DynamicDocument) | ||||
|         self.circular_reference_deltas_2(DynamicDocument, Document) | ||||
|         self.circular_reference_deltas_2(DynamicDocument, DynamicDocument) | ||||
|  | ||||
    def circular_reference_deltas_2(self, DocClass1, DocClass2, dbref=True):
        """Helper: like ``circular_reference_deltas`` but with a third
        document in the cycle and configurable reference storage.

        ``dbref`` is forwarded to every ReferenceField (DBRef vs plain
        ObjectId storage). Returns the saved (person, organization,
        employee) triple so callers can make further assertions.
        """

        class Person(DocClass1):
            name = StringField()
            owns = ListField(ReferenceField("Organization", dbref=dbref))
            employer = ReferenceField("Organization", dbref=dbref)

        class Organization(DocClass2):
            name = StringField()
            owner = ReferenceField("Person", dbref=dbref)
            employees = ListField(ReferenceField("Person", dbref=dbref))

        Person.drop_collection()
        Organization.drop_collection()

        person = Person(name="owner").save()
        employee = Person(name="employee").save()
        organization = Organization(name="company").save()

        # Wire up the cycle after all three documents have ids.
        person.owns.append(organization)
        organization.owner = person

        organization.employees.append(employee)
        employee.employer = organization

        person.save()
        organization.save()
        employee.save()

        p = Person.objects.get(name="owner")
        e = Person.objects.get(name="employee")
        o = Organization.objects.first()

        assert p.owns[0] == o
        assert o.owner == p
        assert e.employer == o

        return person, organization, employee
|  | ||||
|     def test_delta_db_field(self): | ||||
|         self.delta_db_field(Document) | ||||
|         self.delta_db_field(DynamicDocument) | ||||
|  | ||||
    def delta_db_field(self, DocClass):
        """Helper: deltas must be expressed with the ``db_field`` names
        (the names actually stored in MongoDB), not the Python attribute
        names.
        """

        class Doc(DocClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # A freshly fetched document carries no pending changes.
        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["db_string_field"]
        assert doc._delta() == ({"db_string_field": "hello"}, {})

        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["db_int_field"]
        assert doc._delta() == ({"db_int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["db_dict_field"]
        assert doc._delta() == ({"db_dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["db_list_field"]
        assert doc._delta() == ({"db_list_field": list_value}, {})

        # Test unsetting: assigning an empty container is reported in the
        # removals half of the delta (an $unset), keyed by db_field name.
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["db_dict_field"]
        assert doc._delta() == ({}, {"db_dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["db_list_field"]
        assert doc._delta() == ({}, {"db_list_field": 1})

        # Test it saves that data
        doc = Doc()
        doc.save()

        doc.string_field = "hello"
        doc.int_field = 1
        doc.dict_field = {"hello": "world"}
        doc.list_field = ["1", 2, {"hello": "world"}]
        doc.save()
        doc = doc.reload(10)

        assert doc.string_field == "hello"
        assert doc.int_field == 1
        assert doc.dict_field == {"hello": "world"}
        assert doc.list_field == ["1", 2, {"hello": "world"}]
|  | ||||
|     def test_delta_recursive_db_field(self): | ||||
|         self.delta_recursive_db_field(Document, EmbeddedDocument) | ||||
|         self.delta_recursive_db_field(Document, DynamicEmbeddedDocument) | ||||
|         self.delta_recursive_db_field(DynamicDocument, EmbeddedDocument) | ||||
|         self.delta_recursive_db_field(DynamicDocument, DynamicEmbeddedDocument) | ||||
|  | ||||
    def delta_recursive_db_field(self, DocClass, EmbeddedClass):
        """Helper: delta tracking through an embedded document whose
        fields (and the embedding field itself) are all renamed via
        ``db_field`` — deltas must use the dotted database names.
        """

        class Embedded(EmbeddedClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")

        class Doc(DocClass):
            string_field = StringField(db_field="db_string_field")
            int_field = IntField(db_field="db_int_field")
            dict_field = DictField(db_field="db_dict_field")
            list_field = ListField(db_field="db_list_field")
            embedded_field = EmbeddedDocumentField(
                Embedded, db_field="db_embedded_field"
            )

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # A freshly fetched document carries no pending changes.
        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        embedded_1 = Embedded()
        embedded_1.string_field = "hello"
        embedded_1.int_field = 1
        embedded_1.dict_field = {"hello": "world"}
        embedded_1.list_field = ["1", 2, {"hello": "world"}]
        doc.embedded_field = embedded_1

        assert doc._get_changed_fields() == ["db_embedded_field"]

        embedded_delta = {
            "db_string_field": "hello",
            "db_int_field": 1,
            "db_dict_field": {"hello": "world"},
            "db_list_field": ["1", 2, {"hello": "world"}],
        }
        assert doc.embedded_field._delta() == (embedded_delta, {})
        assert doc._delta() == ({"db_embedded_field": embedded_delta}, {})

        doc.save()
        doc = doc.reload(10)

        # Emptying a dict inside the embedded doc is reported as an unset
        # of the dotted db_field path.
        doc.embedded_field.dict_field = {}
        assert doc._get_changed_fields() == ["db_embedded_field.db_dict_field"]
        assert doc.embedded_field._delta() == ({}, {"db_dict_field": 1})
        assert doc._delta() == ({}, {"db_embedded_field.db_dict_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.dict_field == {}

        doc.embedded_field.list_field = []
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
        assert doc.embedded_field._delta() == ({}, {"db_list_field": 1})
        assert doc._delta() == ({}, {"db_embedded_field.db_list_field": 1})
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field == []

        embedded_2 = Embedded()
        embedded_2.string_field = "hello"
        embedded_2.int_field = 1
        embedded_2.dict_field = {"hello": "world"}
        embedded_2.list_field = ["1", 2, {"hello": "world"}]

        # An embedded doc placed inside a list serializes with its _cls
        # marker and db_field names.
        doc.embedded_field.list_field = ["1", 2, embedded_2]
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field"]
        assert doc.embedded_field._delta() == (
            {
                "db_list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "db_string_field": "hello",
                        "db_dict_field": {"hello": "world"},
                        "db_int_field": 1,
                        "db_list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )

        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field": [
                    "1",
                    2,
                    {
                        "_cls": "Embedded",
                        "db_string_field": "hello",
                        "db_dict_field": {"hello": "world"},
                        "db_int_field": 1,
                        "db_list_field": ["1", 2, {"hello": "world"}],
                    },
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)

        assert doc.embedded_field.list_field[0] == "1"
        assert doc.embedded_field.list_field[1] == 2
        for k in doc.embedded_field.list_field[2]._fields:
            assert doc.embedded_field.list_field[2][k] == embedded_2[k]

        # Mutating a field of an embedded doc inside a list tracks the
        # full dotted path, including the numeric list index.
        doc.embedded_field.list_field[2].string_field = "world"
        assert doc._get_changed_fields() == [
            "db_embedded_field.db_list_field.2.db_string_field"
        ]
        assert doc.embedded_field._delta() == (
            {"db_list_field.2.db_string_field": "world"},
            {},
        )
        assert doc._delta() == (
            {"db_embedded_field.db_list_field.2.db_string_field": "world"},
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "world"

        # Test multiple assignments
        doc.embedded_field.list_field[2].string_field = "hello world"
        doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2]
        assert doc._get_changed_fields() == ["db_embedded_field.db_list_field.2"]
        assert doc.embedded_field._delta() == (
            {
                "db_list_field.2": {
                    "_cls": "Embedded",
                    "db_string_field": "hello world",
                    "db_int_field": 1,
                    "db_list_field": ["1", 2, {"hello": "world"}],
                    "db_dict_field": {"hello": "world"},
                }
            },
            {},
        )
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2": {
                    "_cls": "Embedded",
                    "db_string_field": "hello world",
                    "db_int_field": 1,
                    "db_list_field": ["1", 2, {"hello": "world"}],
                    "db_dict_field": {"hello": "world"},
                }
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].string_field == "hello world"

        # Test list native methods
        doc.embedded_field.list_field[2].list_field.pop(0)
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2.db_list_field": [
                    2,
                    {"hello": "world"},
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)

        doc.embedded_field.list_field[2].list_field.append(1)
        assert doc._delta() == (
            {
                "db_embedded_field.db_list_field.2.db_list_field": [
                    2,
                    {"hello": "world"},
                    1,
                ]
            },
            {},
        )
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [2, {"hello": "world"}, 1]

        doc.embedded_field.list_field[2].list_field.sort(key=str)
        doc.save()
        doc = doc.reload(10)
        assert doc.embedded_field.list_field[2].list_field == [1, 2, {"hello": "world"}]

        # Deleting a dict key deep inside the structure is an unset of the
        # full dotted path.
        del doc.embedded_field.list_field[2].list_field[2]["hello"]
        assert doc._delta() == (
            {},
            {"db_embedded_field.db_list_field.2.db_list_field.2.hello": 1},
        )
        doc.save()
        doc = doc.reload(10)

        # Deleting a whole attribute is likewise reported as an unset.
        del doc.embedded_field.list_field[2].list_field
        assert doc._delta() == (
            {},
            {"db_embedded_field.db_list_field.2.db_list_field": 1},
        )
|  | ||||
    def test_delta_for_dynamic_documents(self):
        """Delta behaviour of dynamic (schema-less) attributes on a
        DynamicDocument with inheritance enabled."""

        class Person(DynamicDocument):
            name = StringField()
            meta = {"allow_inheritance": True}

        Person.drop_collection()

        # An unsaved document's delta is its full SON, including _cls.
        p = Person(name="James", age=34)
        assert p._delta() == (
            SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
            {},
        )

        # Setting and immediately deleting a dynamic attribute leaves the
        # delta unchanged.
        p.doc = 123
        del p.doc
        assert p._delta() == (
            SON([("_cls", "Person"), ("name", "James"), ("age", 34)]),
            {},
        )

        p = Person()
        p.name = "Dean"
        p.age = 22
        p.save()

        p.age = 24
        assert p.age == 24
        assert p._get_changed_fields() == ["age"]
        assert p._delta() == ({"age": 24}, {})

        # Same behaviour for a document fetched from the database.
        p = Person.objects(age=22).get()
        p.age = 24
        assert p.age == 24
        assert p._get_changed_fields() == ["age"]
        assert p._delta() == ({"age": 24}, {})

        p.save()
        assert 1 == Person.objects(age=24).count()
|  | ||||
    def test_dynamic_delta(self):
        """Dynamic fields produce the same delta behaviour as declared
        fields, keyed by the attribute name."""

        class Doc(DynamicDocument):
            pass

        Doc.drop_collection()
        doc = Doc()
        doc.save()

        # A freshly fetched document carries no pending changes.
        doc = Doc.objects.first()
        assert doc._get_changed_fields() == []
        assert doc._delta() == ({}, {})

        doc.string_field = "hello"
        assert doc._get_changed_fields() == ["string_field"]
        assert doc._delta() == ({"string_field": "hello"}, {})

        doc._changed_fields = []
        doc.int_field = 1
        assert doc._get_changed_fields() == ["int_field"]
        assert doc._delta() == ({"int_field": 1}, {})

        doc._changed_fields = []
        dict_value = {"hello": "world", "ping": "pong"}
        doc.dict_field = dict_value
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({"dict_field": dict_value}, {})

        doc._changed_fields = []
        list_value = ["1", 2, {"hello": "world"}]
        doc.list_field = list_value
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({"list_field": list_value}, {})

        # Test unsetting: assigning an empty container lands in the
        # removals half of the delta (an $unset).
        doc._changed_fields = []
        doc.dict_field = {}
        assert doc._get_changed_fields() == ["dict_field"]
        assert doc._delta() == ({}, {"dict_field": 1})

        doc._changed_fields = []
        doc.list_field = []
        assert doc._get_changed_fields() == ["list_field"]
        assert doc._delta() == ({}, {"list_field": 1})
|  | ||||
|     def test_delta_with_dbref_true(self): | ||||
|         person, organization, employee = self.circular_reference_deltas_2( | ||||
|             Document, Document, True | ||||
|         ) | ||||
|         employee.name = "test" | ||||
|  | ||||
|         assert organization._get_changed_fields() == [] | ||||
|  | ||||
|         updates, removals = organization._delta() | ||||
|         assert {} == removals | ||||
|         assert {} == updates | ||||
|  | ||||
|         organization.employees.append(person) | ||||
|         updates, removals = organization._delta() | ||||
|         assert {} == removals | ||||
|         assert "employees" in updates | ||||
|  | ||||
|     def test_delta_with_dbref_false(self): | ||||
|         person, organization, employee = self.circular_reference_deltas_2( | ||||
|             Document, Document, False | ||||
|         ) | ||||
|         employee.name = "test" | ||||
|  | ||||
|         assert organization._get_changed_fields() == [] | ||||
|  | ||||
|         updates, removals = organization._delta() | ||||
|         assert {} == removals | ||||
|         assert {} == updates | ||||
|  | ||||
|         organization.employees.append(person) | ||||
|         updates, removals = organization._delta() | ||||
|         assert {} == removals | ||||
|         assert "employees" in updates | ||||
|  | ||||
|     def test_nested_nested_fields_mark_as_changed(self): | ||||
|         class EmbeddedDoc(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             subs = MapField(MapField(EmbeddedDocumentField(EmbeddedDoc))) | ||||
|             name = StringField() | ||||
|  | ||||
|         MyDoc.drop_collection() | ||||
|  | ||||
|         mydoc = MyDoc( | ||||
|             name="testcase1", subs={"a": {"b": EmbeddedDoc(name="foo")}} | ||||
|         ).save() | ||||
|  | ||||
|         mydoc = MyDoc.objects.first() | ||||
|         subdoc = mydoc.subs["a"]["b"] | ||||
|         subdoc.name = "bar" | ||||
|  | ||||
|         assert ["name"] == subdoc._get_changed_fields() | ||||
|         assert ["subs.a.b.name"] == mydoc._get_changed_fields() | ||||
|  | ||||
|         mydoc._clear_changed_fields() | ||||
|         assert [] == mydoc._get_changed_fields() | ||||
|  | ||||
|     def test_lower_level_mark_as_changed(self): | ||||
|         class EmbeddedDoc(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) | ||||
|  | ||||
|         MyDoc.drop_collection() | ||||
|  | ||||
|         MyDoc().save() | ||||
|  | ||||
|         mydoc = MyDoc.objects.first() | ||||
|         mydoc.subs["a"] = EmbeddedDoc() | ||||
|         assert ["subs.a"] == mydoc._get_changed_fields() | ||||
|  | ||||
|         subdoc = mydoc.subs["a"] | ||||
|         subdoc.name = "bar" | ||||
|  | ||||
|         assert ["name"] == subdoc._get_changed_fields() | ||||
|         assert ["subs.a"] == mydoc._get_changed_fields() | ||||
|         mydoc.save() | ||||
|  | ||||
|         mydoc._clear_changed_fields() | ||||
|         assert [] == mydoc._get_changed_fields() | ||||
|  | ||||
|     def test_upper_level_mark_as_changed(self): | ||||
|         class EmbeddedDoc(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             subs = MapField(EmbeddedDocumentField(EmbeddedDoc)) | ||||
|  | ||||
|         MyDoc.drop_collection() | ||||
|  | ||||
|         MyDoc(subs={"a": EmbeddedDoc(name="foo")}).save() | ||||
|  | ||||
|         mydoc = MyDoc.objects.first() | ||||
|         subdoc = mydoc.subs["a"] | ||||
|         subdoc.name = "bar" | ||||
|  | ||||
|         assert ["name"] == subdoc._get_changed_fields() | ||||
|         assert ["subs.a.name"] == mydoc._get_changed_fields() | ||||
|  | ||||
|         mydoc.subs["a"] = EmbeddedDoc() | ||||
|         assert ["subs.a"] == mydoc._get_changed_fields() | ||||
|         mydoc.save() | ||||
|  | ||||
|         mydoc._clear_changed_fields() | ||||
|         assert [] == mydoc._get_changed_fields() | ||||
|  | ||||
    def test_referenced_object_changed_attributes(self):
        """Ensures that when you save a new reference to a field, the referenced object isn't altered"""

        class Organization(Document):
            name = StringField()

        class User(Document):
            name = StringField()
            org = ReferenceField("Organization", required=True)

        Organization.drop_collection()
        User.drop_collection()

        org1 = Organization(name="Org 1")
        org1.save()

        org2 = Organization(name="Org 2")
        org2.save()

        user = User(name="Fred", org=org1)
        user.save()

        # Sanity check: everything round-trips unchanged before the swap.
        org1.reload()
        org2.reload()
        user.reload()
        assert org1.name == "Org 1"
        assert org2.name == "Org 2"
        assert user.name == "Fred"

        user.name = "Harold"
        user.org = org2

        # Rename org2 *after* assigning it to user.org: the assignment
        # must not reset org2's pending changes.
        org2.name = "New Org 2"
        assert org2.name == "New Org 2"

        user.save()
        org2.save()

        # The new name survives both saves, in memory and in the database.
        assert org2.name == "New Org 2"
        org2.reload()
        assert org2.name == "New Org 2"
|  | ||||
|     def test_delta_for_nested_map_fields(self): | ||||
|         class UInfoDocument(Document): | ||||
|             phone = StringField() | ||||
|  | ||||
|         class EmbeddedRole(EmbeddedDocument): | ||||
|             type = StringField() | ||||
|  | ||||
|         class EmbeddedUser(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|             roles = MapField(field=EmbeddedDocumentField(EmbeddedRole)) | ||||
|             rolist = ListField(field=EmbeddedDocumentField(EmbeddedRole)) | ||||
|             info = ReferenceField(UInfoDocument) | ||||
|  | ||||
|         class Doc(Document): | ||||
|             users = MapField(field=EmbeddedDocumentField(EmbeddedUser)) | ||||
|             num = IntField(default=-1) | ||||
|  | ||||
|         Doc.drop_collection() | ||||
|  | ||||
|         doc = Doc(num=1) | ||||
|         doc.users["007"] = EmbeddedUser(name="Agent007") | ||||
|         doc.save() | ||||
|  | ||||
|         uinfo = UInfoDocument(phone="79089269066") | ||||
|         uinfo.save() | ||||
|  | ||||
|         d = Doc.objects(num=1).first() | ||||
|         d.users["007"]["roles"]["666"] = EmbeddedRole(type="superadmin") | ||||
|         d.users["007"]["rolist"].append(EmbeddedRole(type="oops")) | ||||
|         d.users["007"]["info"] = uinfo | ||||
|         delta = d._delta() | ||||
|         assert True == ("users.007.roles.666" in delta[0]) | ||||
|         assert True == ("users.007.rolist" in delta[0]) | ||||
|         assert True == ("users.007.info" in delta[0]) | ||||
|         assert "superadmin" == delta[0]["users.007.roles.666"]["type"] | ||||
|         assert "oops" == delta[0]["users.007.rolist"][0]["type"] | ||||
|         assert uinfo.id == delta[0]["users.007.info"] | ||||
|  | ||||
|  | ||||
if __name__ == "__main__":
    # Allow running this test module directly (outside a pytest run).
    unittest.main()
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,633 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import unittest | ||||
| import warnings | ||||
|  | ||||
| import pytest | ||||
| from six import iteritems | ||||
|  | ||||
| from mongoengine import ( | ||||
|     BooleanField, | ||||
|     Document, | ||||
|     EmbeddedDocument, | ||||
|     EmbeddedDocumentField, | ||||
|     GenericReferenceField, | ||||
|     IntField, | ||||
|     ReferenceField, | ||||
|     StringField, | ||||
| ) | ||||
| from mongoengine.pymongo_support import list_collection_names | ||||
| from tests.fixtures import Base | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
| class TestInheritance(MongoDBTestCase): | ||||
|     def tearDown(self): | ||||
|         for collection in list_collection_names(self.db): | ||||
|             self.db.drop_collection(collection) | ||||
|  | ||||
    def test_constructor_cls(self):
        """The ``_cls`` marker is set at construction time and survives a
        save/fetch round trip (regression test for #1950)."""
        # Ensures _cls is properly set during construction
        # and when object gets reloaded (prevent regression of #1950)
        class EmbedData(EmbeddedDocument):
            data = StringField()
            meta = {"allow_inheritance": True}

        class DataDoc(Document):
            name = StringField()
            embed = EmbeddedDocumentField(EmbedData)
            meta = {"allow_inheritance": True}

        test_doc = DataDoc(name="test", embed=EmbedData(data="data"))
        assert test_doc._cls == "DataDoc"
        assert test_doc.embed._cls == "EmbedData"
        test_doc.save()
        saved_doc = DataDoc.objects.with_id(test_doc.id)
        # The markers must be identical on the in-memory and fetched copies.
        assert test_doc._cls == saved_doc._cls
        assert test_doc.embed._cls == saved_doc.embed._cls
        test_doc.delete()
|  | ||||
|     def test_superclasses(self): | ||||
|         """Ensure that the correct list of superclasses is assembled. | ||||
|         """ | ||||
|  | ||||
|         class Animal(Document): | ||||
|             meta = {"allow_inheritance": True} | ||||
|  | ||||
|         class Fish(Animal): | ||||
|             pass | ||||
|  | ||||
|         class Guppy(Fish): | ||||
|             pass | ||||
|  | ||||
|         class Mammal(Animal): | ||||
|             pass | ||||
|  | ||||
|         class Dog(Mammal): | ||||
|             pass | ||||
|  | ||||
|         class Human(Mammal): | ||||
|             pass | ||||
|  | ||||
|         assert Animal._superclasses == () | ||||
|         assert Fish._superclasses == ("Animal",) | ||||
|         assert Guppy._superclasses == ("Animal", "Animal.Fish") | ||||
|         assert Mammal._superclasses == ("Animal",) | ||||
|         assert Dog._superclasses == ("Animal", "Animal.Mammal") | ||||
|         assert Human._superclasses == ("Animal", "Animal.Mammal") | ||||
|  | ||||
|     def test_external_superclasses(self): | ||||
|         """Ensure that the correct list of super classes is assembled when | ||||
|         importing part of the model. | ||||
|         """ | ||||
|  | ||||
|         class Animal(Base): | ||||
|             pass | ||||
|  | ||||
|         class Fish(Animal): | ||||
|             pass | ||||
|  | ||||
|         class Guppy(Fish): | ||||
|             pass | ||||
|  | ||||
|         class Mammal(Animal): | ||||
|             pass | ||||
|  | ||||
|         class Dog(Mammal): | ||||
|             pass | ||||
|  | ||||
|         class Human(Mammal): | ||||
|             pass | ||||
|  | ||||
|         assert Animal._superclasses == ("Base",) | ||||
|         assert Fish._superclasses == ("Base", "Base.Animal") | ||||
|         assert Guppy._superclasses == ("Base", "Base.Animal", "Base.Animal.Fish") | ||||
|         assert Mammal._superclasses == ("Base", "Base.Animal") | ||||
|         assert Dog._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal") | ||||
|         assert Human._superclasses == ("Base", "Base.Animal", "Base.Animal.Mammal") | ||||
|  | ||||
|     def test_subclasses(self): | ||||
|         """Ensure that the correct list of _subclasses (subclasses) is | ||||
|         assembled. | ||||
|         """ | ||||
|  | ||||
|         class Animal(Document): | ||||
|             meta = {"allow_inheritance": True} | ||||
|  | ||||
|         class Fish(Animal): | ||||
|             pass | ||||
|  | ||||
|         class Guppy(Fish): | ||||
|             pass | ||||
|  | ||||
|         class Mammal(Animal): | ||||
|             pass | ||||
|  | ||||
|         class Dog(Mammal): | ||||
|             pass | ||||
|  | ||||
|         class Human(Mammal): | ||||
|             pass | ||||
|  | ||||
|         assert Animal._subclasses == ( | ||||
|             "Animal", | ||||
|             "Animal.Fish", | ||||
|             "Animal.Fish.Guppy", | ||||
|             "Animal.Mammal", | ||||
|             "Animal.Mammal.Dog", | ||||
|             "Animal.Mammal.Human", | ||||
|         ) | ||||
|         assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Guppy") | ||||
|         assert Guppy._subclasses == ("Animal.Fish.Guppy",) | ||||
|         assert Mammal._subclasses == ( | ||||
|             "Animal.Mammal", | ||||
|             "Animal.Mammal.Dog", | ||||
|             "Animal.Mammal.Human", | ||||
|         ) | ||||
|         assert Human._subclasses == ("Animal.Mammal.Human",) | ||||
|  | ||||
|     def test_external_subclasses(self): | ||||
|         """Ensure that the correct list of _subclasses (subclasses) is | ||||
|         assembled when importing part of the model. | ||||
|         """ | ||||
|  | ||||
|         class Animal(Base): | ||||
|             pass | ||||
|  | ||||
|         class Fish(Animal): | ||||
|             pass | ||||
|  | ||||
|         class Guppy(Fish): | ||||
|             pass | ||||
|  | ||||
|         class Mammal(Animal): | ||||
|             pass | ||||
|  | ||||
|         class Dog(Mammal): | ||||
|             pass | ||||
|  | ||||
|         class Human(Mammal): | ||||
|             pass | ||||
|  | ||||
|         assert Animal._subclasses == ( | ||||
|             "Base.Animal", | ||||
|             "Base.Animal.Fish", | ||||
|             "Base.Animal.Fish.Guppy", | ||||
|             "Base.Animal.Mammal", | ||||
|             "Base.Animal.Mammal.Dog", | ||||
|             "Base.Animal.Mammal.Human", | ||||
|         ) | ||||
|         assert Fish._subclasses == ("Base.Animal.Fish", "Base.Animal.Fish.Guppy") | ||||
|         assert Guppy._subclasses == ("Base.Animal.Fish.Guppy",) | ||||
|         assert Mammal._subclasses == ( | ||||
|             "Base.Animal.Mammal", | ||||
|             "Base.Animal.Mammal.Dog", | ||||
|             "Base.Animal.Mammal.Human", | ||||
|         ) | ||||
|         assert Human._subclasses == ("Base.Animal.Mammal.Human",) | ||||
|  | ||||
|     def test_dynamic_declarations(self): | ||||
|         """Test that declaring an extra class updates meta data""" | ||||
|  | ||||
|         class Animal(Document): | ||||
|             meta = {"allow_inheritance": True} | ||||
|  | ||||
|         assert Animal._superclasses == () | ||||
|         assert Animal._subclasses == ("Animal",) | ||||
|  | ||||
|         # Test dynamically adding a class changes the meta data | ||||
|         class Fish(Animal): | ||||
|             pass | ||||
|  | ||||
|         assert Animal._superclasses == () | ||||
|         assert Animal._subclasses == ("Animal", "Animal.Fish") | ||||
|  | ||||
|         assert Fish._superclasses == ("Animal",) | ||||
|         assert Fish._subclasses == ("Animal.Fish",) | ||||
|  | ||||
|         # Test dynamically adding an inherited class changes the meta data | ||||
|         class Pike(Fish): | ||||
|             pass | ||||
|  | ||||
|         assert Animal._superclasses == () | ||||
|         assert Animal._subclasses == ("Animal", "Animal.Fish", "Animal.Fish.Pike") | ||||
|  | ||||
|         assert Fish._superclasses == ("Animal",) | ||||
|         assert Fish._subclasses == ("Animal.Fish", "Animal.Fish.Pike") | ||||
|  | ||||
|         assert Pike._superclasses == ("Animal", "Animal.Fish") | ||||
|         assert Pike._subclasses == ("Animal.Fish.Pike",) | ||||
|  | ||||
|     def test_inheritance_meta_data(self): | ||||
|         """Ensure that document may inherit fields from a superclass document. | ||||
|         """ | ||||
|  | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             age = IntField() | ||||
|  | ||||
|             meta = {"allow_inheritance": True} | ||||
|  | ||||
|         class Employee(Person): | ||||
|             salary = IntField() | ||||
|  | ||||
|         assert ["_cls", "age", "id", "name", "salary"] == sorted( | ||||
|             Employee._fields.keys() | ||||
|         ) | ||||
|         assert Employee._get_collection_name() == Person._get_collection_name() | ||||
|  | ||||
|     def test_inheritance_to_mongo_keys(self): | ||||
|         """Ensure that document may inherit fields from a superclass document. | ||||
|         """ | ||||
|  | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             age = IntField() | ||||
|  | ||||
|             meta = {"allow_inheritance": True} | ||||
|  | ||||
|         class Employee(Person): | ||||
|             salary = IntField() | ||||
|  | ||||
|         assert ["_cls", "age", "id", "name", "salary"] == sorted( | ||||
|             Employee._fields.keys() | ||||
|         ) | ||||
|         assert Person(name="Bob", age=35).to_mongo().keys() == ["_cls", "name", "age"] | ||||
|         assert Employee(name="Bob", age=35, salary=0).to_mongo().keys() == [ | ||||
|             "_cls", | ||||
|             "name", | ||||
|             "age", | ||||
|             "salary", | ||||
|         ] | ||||
|         assert Employee._get_collection_name() == Person._get_collection_name() | ||||
|  | ||||
|     def test_indexes_and_multiple_inheritance(self): | ||||
|         """ Ensure that all of the indexes are created for a document with | ||||
|         multiple inheritance. | ||||
|         """ | ||||
|  | ||||
|         class A(Document): | ||||
|             a = StringField() | ||||
|  | ||||
|             meta = {"allow_inheritance": True, "indexes": ["a"]} | ||||
|  | ||||
|         class B(Document): | ||||
|             b = StringField() | ||||
|  | ||||
|             meta = {"allow_inheritance": True, "indexes": ["b"]} | ||||
|  | ||||
|         class C(A, B): | ||||
|             pass | ||||
|  | ||||
|         A.drop_collection() | ||||
|         B.drop_collection() | ||||
|         C.drop_collection() | ||||
|  | ||||
|         C.ensure_indexes() | ||||
|  | ||||
|         assert sorted( | ||||
|             [idx["key"] for idx in C._get_collection().index_information().values()] | ||||
|         ) == sorted( | ||||
|             [[(u"_cls", 1), (u"b", 1)], [(u"_id", 1)], [(u"_cls", 1), (u"a", 1)]] | ||||
|         ) | ||||
|  | ||||
|     def test_polymorphic_queries(self): | ||||
|         """Ensure that the correct subclasses are returned from a query | ||||
|         """ | ||||
|  | ||||
|         class Animal(Document): | ||||
|             meta = {"allow_inheritance": True} | ||||
|  | ||||
|         class Fish(Animal): | ||||
|             pass | ||||
|  | ||||
|         class Mammal(Animal): | ||||
|             pass | ||||
|  | ||||
|         class Dog(Mammal): | ||||
|             pass | ||||
|  | ||||
|         class Human(Mammal): | ||||
|             pass | ||||
|  | ||||
|         Animal.drop_collection() | ||||
|  | ||||
|         Animal().save() | ||||
|         Fish().save() | ||||
|         Mammal().save() | ||||
|         Dog().save() | ||||
|         Human().save() | ||||
|  | ||||
|         classes = [obj.__class__ for obj in Animal.objects] | ||||
|         assert classes == [Animal, Fish, Mammal, Dog, Human] | ||||
|  | ||||
|         classes = [obj.__class__ for obj in Mammal.objects] | ||||
|         assert classes == [Mammal, Dog, Human] | ||||
|  | ||||
|         classes = [obj.__class__ for obj in Human.objects] | ||||
|         assert classes == [Human] | ||||
|  | ||||
|     def test_allow_inheritance(self): | ||||
|         """Ensure that inheritance is disabled by default on simple | ||||
|         classes and that _cls will not be used. | ||||
|         """ | ||||
|  | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         # can't inherit because Animal didn't explicitly allow inheritance | ||||
|         with pytest.raises(ValueError, match="Document Animal may not be subclassed"): | ||||
|  | ||||
|             class Dog(Animal): | ||||
|                 pass | ||||
|  | ||||
|         # Check that _cls etc aren't present on simple documents | ||||
|         dog = Animal(name="dog").save() | ||||
|         assert dog.to_mongo().keys() == ["_id", "name"] | ||||
|  | ||||
|         collection = self.db[Animal._get_collection_name()] | ||||
|         obj = collection.find_one() | ||||
|         assert "_cls" not in obj | ||||
|  | ||||
|     def test_cant_turn_off_inheritance_on_subclass(self): | ||||
|         """Ensure if inheritance is on in a subclass you cant turn it off. | ||||
|         """ | ||||
|  | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             meta = {"allow_inheritance": True} | ||||
|  | ||||
|         with pytest.raises(ValueError) as exc_info: | ||||
|  | ||||
|             class Mammal(Animal): | ||||
|                 meta = {"allow_inheritance": False} | ||||
|  | ||||
|         assert ( | ||||
|             str(exc_info.value) | ||||
|             == 'Only direct subclasses of Document may set "allow_inheritance" to False' | ||||
|         ) | ||||
|  | ||||
|     def test_allow_inheritance_abstract_document(self): | ||||
|         """Ensure that abstract documents can set inheritance rules and that | ||||
|         _cls will not be used. | ||||
|         """ | ||||
|  | ||||
|         class FinalDocument(Document): | ||||
|             meta = {"abstract": True, "allow_inheritance": False} | ||||
|  | ||||
|         class Animal(FinalDocument): | ||||
|             name = StringField() | ||||
|  | ||||
|         with pytest.raises(ValueError): | ||||
|  | ||||
|             class Mammal(Animal): | ||||
|                 pass | ||||
|  | ||||
|         # Check that _cls isn't present in simple documents | ||||
|         doc = Animal(name="dog") | ||||
|         assert "_cls" not in doc.to_mongo() | ||||
|  | ||||
|     def test_using_abstract_class_in_reference_field(self): | ||||
|         # Ensures no regression of #1920 | ||||
|         class AbstractHuman(Document): | ||||
|             meta = {"abstract": True} | ||||
|  | ||||
|         class Dad(AbstractHuman): | ||||
|             name = StringField() | ||||
|  | ||||
|         class Home(Document): | ||||
|             dad = ReferenceField(AbstractHuman)  # Referencing the abstract class | ||||
|             address = StringField() | ||||
|  | ||||
|         dad = Dad(name="5").save() | ||||
|         Home(dad=dad, address="street").save() | ||||
|  | ||||
|         home = Home.objects.first() | ||||
|         home.address = "garbage" | ||||
|         home.save()  # Was failing with ValidationError | ||||
|  | ||||
|     def test_abstract_class_referencing_self(self): | ||||
|         # Ensures no regression of #1920 | ||||
|         class Human(Document): | ||||
|             meta = {"abstract": True} | ||||
|             creator = ReferenceField("self", dbref=True) | ||||
|  | ||||
|         class User(Human): | ||||
|             name = StringField() | ||||
|  | ||||
|         user = User(name="John").save() | ||||
|         user2 = User(name="Foo", creator=user).save() | ||||
|  | ||||
|         user2 = User.objects.with_id(user2.id) | ||||
|         user2.name = "Bar" | ||||
|         user2.save()  # Was failing with ValidationError | ||||
|  | ||||
|     def test_abstract_handle_ids_in_metaclass_properly(self): | ||||
|         class City(Document): | ||||
|             continent = StringField() | ||||
|             meta = {"abstract": True, "allow_inheritance": False} | ||||
|  | ||||
|         class EuropeanCity(City): | ||||
|             name = StringField() | ||||
|  | ||||
|         berlin = EuropeanCity(name="Berlin", continent="Europe") | ||||
|         assert len(berlin._db_field_map) == len(berlin._fields_ordered) | ||||
|         assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered) | ||||
|         assert len(berlin._fields_ordered) == 3 | ||||
|         assert berlin._fields_ordered[0] == "id" | ||||
|  | ||||
|     def test_auto_id_not_set_if_specific_in_parent_class(self): | ||||
|         class City(Document): | ||||
|             continent = StringField() | ||||
|             city_id = IntField(primary_key=True) | ||||
|             meta = {"abstract": True, "allow_inheritance": False} | ||||
|  | ||||
|         class EuropeanCity(City): | ||||
|             name = StringField() | ||||
|  | ||||
|         berlin = EuropeanCity(name="Berlin", continent="Europe") | ||||
|         assert len(berlin._db_field_map) == len(berlin._fields_ordered) | ||||
|         assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered) | ||||
|         assert len(berlin._fields_ordered) == 3 | ||||
|         assert berlin._fields_ordered[0] == "city_id" | ||||
|  | ||||
|     def test_auto_id_vs_non_pk_id_field(self): | ||||
|         class City(Document): | ||||
|             continent = StringField() | ||||
|             id = IntField() | ||||
|             meta = {"abstract": True, "allow_inheritance": False} | ||||
|  | ||||
|         class EuropeanCity(City): | ||||
|             name = StringField() | ||||
|  | ||||
|         berlin = EuropeanCity(name="Berlin", continent="Europe") | ||||
|         assert len(berlin._db_field_map) == len(berlin._fields_ordered) | ||||
|         assert len(berlin._reverse_db_field_map) == len(berlin._fields_ordered) | ||||
|         assert len(berlin._fields_ordered) == 4 | ||||
|         assert berlin._fields_ordered[0] == "auto_id_0" | ||||
|         berlin.save() | ||||
|         assert berlin.pk == berlin.auto_id_0 | ||||
|  | ||||
|     def test_abstract_document_creation_does_not_fail(self): | ||||
|         class City(Document): | ||||
|             continent = StringField() | ||||
|             meta = {"abstract": True, "allow_inheritance": False} | ||||
|  | ||||
|         city = City(continent="asia") | ||||
|         assert city.pk is None | ||||
|         # TODO: expected error? Shouldn't we create a new error type? | ||||
|         with pytest.raises(KeyError): | ||||
|             setattr(city, "pk", 1) | ||||
|  | ||||
|     def test_allow_inheritance_embedded_document(self): | ||||
|         """Ensure embedded documents respect inheritance.""" | ||||
|  | ||||
|         class Comment(EmbeddedDocument): | ||||
|             content = StringField() | ||||
|  | ||||
|         with pytest.raises(ValueError): | ||||
|  | ||||
|             class SpecialComment(Comment): | ||||
|                 pass | ||||
|  | ||||
|         doc = Comment(content="test") | ||||
|         assert "_cls" not in doc.to_mongo() | ||||
|  | ||||
|         class Comment(EmbeddedDocument): | ||||
|             content = StringField() | ||||
|             meta = {"allow_inheritance": True} | ||||
|  | ||||
|         doc = Comment(content="test") | ||||
|         assert "_cls" in doc.to_mongo() | ||||
|  | ||||
|     def test_document_inheritance(self): | ||||
|         """Ensure mutliple inheritance of abstract documents | ||||
|         """ | ||||
|  | ||||
|         class DateCreatedDocument(Document): | ||||
|             meta = {"allow_inheritance": True, "abstract": True} | ||||
|  | ||||
|         class DateUpdatedDocument(Document): | ||||
|             meta = {"allow_inheritance": True, "abstract": True} | ||||
|  | ||||
|         try: | ||||
|  | ||||
|             class MyDocument(DateCreatedDocument, DateUpdatedDocument): | ||||
|                 pass | ||||
|  | ||||
|         except Exception: | ||||
|             assert False, "Couldn't create MyDocument class" | ||||
|  | ||||
|     def test_abstract_documents(self): | ||||
|         """Ensure that a document superclass can be marked as abstract | ||||
|         thereby not using it as the name for the collection.""" | ||||
|  | ||||
|         defaults = { | ||||
|             "index_background": True, | ||||
|             "index_drop_dups": True, | ||||
|             "index_opts": {"hello": "world"}, | ||||
|             "allow_inheritance": True, | ||||
|             "queryset_class": "QuerySet", | ||||
|             "db_alias": "myDB", | ||||
|             "shard_key": ("hello", "world"), | ||||
|         } | ||||
|  | ||||
|         meta_settings = {"abstract": True} | ||||
|         meta_settings.update(defaults) | ||||
|  | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             meta = meta_settings | ||||
|  | ||||
|         class Fish(Animal): | ||||
|             pass | ||||
|  | ||||
|         class Guppy(Fish): | ||||
|             pass | ||||
|  | ||||
|         class Mammal(Animal): | ||||
|             meta = {"abstract": True} | ||||
|  | ||||
|         class Human(Mammal): | ||||
|             pass | ||||
|  | ||||
|         for k, v in iteritems(defaults): | ||||
|             for cls in [Animal, Fish, Guppy]: | ||||
|                 assert cls._meta[k] == v | ||||
|  | ||||
|         assert "collection" not in Animal._meta | ||||
|         assert "collection" not in Mammal._meta | ||||
|  | ||||
|         assert Animal._get_collection_name() is None | ||||
|         assert Mammal._get_collection_name() is None | ||||
|  | ||||
|         assert Fish._get_collection_name() == "fish" | ||||
|         assert Guppy._get_collection_name() == "fish" | ||||
|         assert Human._get_collection_name() == "human" | ||||
|  | ||||
|         # ensure that a subclass of a non-abstract class can't be abstract | ||||
|         with pytest.raises(ValueError): | ||||
|  | ||||
|             class EvilHuman(Human): | ||||
|                 evil = BooleanField(default=True) | ||||
|                 meta = {"abstract": True} | ||||
|  | ||||
    def test_abstract_embedded_documents(self):
        # 789: EmbeddedDocument shouldn't inherit abstract
        class A(EmbeddedDocument):
            meta = {"abstract": True}

        class B(A):
            pass

        # "abstract" must not carry over to the subclass automatically.
        assert not B._meta["abstract"]
|  | ||||
|     def test_inherited_collections(self): | ||||
|         """Ensure that subclassed documents don't override parents' | ||||
|         collections | ||||
|         """ | ||||
|  | ||||
|         class Drink(Document): | ||||
|             name = StringField() | ||||
|             meta = {"allow_inheritance": True} | ||||
|  | ||||
|         class Drinker(Document): | ||||
|             drink = GenericReferenceField() | ||||
|  | ||||
|         try: | ||||
|             warnings.simplefilter("error") | ||||
|  | ||||
|             class AcloholicDrink(Drink): | ||||
|                 meta = {"collection": "booze"} | ||||
|  | ||||
|         except SyntaxWarning: | ||||
|             warnings.simplefilter("ignore") | ||||
|  | ||||
|             class AlcoholicDrink(Drink): | ||||
|                 meta = {"collection": "booze"} | ||||
|  | ||||
|         else: | ||||
|             raise AssertionError("SyntaxWarning should be triggered") | ||||
|  | ||||
|         warnings.resetwarnings() | ||||
|  | ||||
|         Drink.drop_collection() | ||||
|         AlcoholicDrink.drop_collection() | ||||
|         Drinker.drop_collection() | ||||
|  | ||||
|         red_bull = Drink(name="Red Bull") | ||||
|         red_bull.save() | ||||
|  | ||||
|         programmer = Drinker(drink=red_bull) | ||||
|         programmer.save() | ||||
|  | ||||
|         beer = AlcoholicDrink(name="Beer") | ||||
|         beer.save() | ||||
|         real_person = Drinker(drink=beer) | ||||
|         real_person.save() | ||||
|  | ||||
|         assert Drinker.objects[0].drink.name == red_bull.name | ||||
|         assert Drinker.objects[1].drink.name == beer.name | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     unittest.main() | ||||
| @@ -2,60 +2,55 @@ | ||||
| import unittest | ||||
| from datetime import datetime | ||||
| 
 | ||||
| import pytest | ||||
| 
 | ||||
| from mongoengine import * | ||||
| from tests.utils import MongoDBTestCase | ||||
| 
 | ||||
| __all__ = ("ValidatorErrorTest",) | ||||
| 
 | ||||
| 
 | ||||
| class TestValidatorError(MongoDBTestCase): | ||||
| class ValidatorErrorTest(unittest.TestCase): | ||||
| 
 | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
| 
 | ||||
|     def test_to_dict(self): | ||||
|         """Ensure a ValidationError handles error to_dict correctly. | ||||
|         """ | ||||
|         error = ValidationError("root") | ||||
|         assert error.to_dict() == {} | ||||
|         error = ValidationError('root') | ||||
|         self.assertEqual(error.to_dict(), {}) | ||||
| 
 | ||||
|         # 1st level error schema | ||||
|         error.errors = {"1st": ValidationError("bad 1st")} | ||||
|         assert "1st" in error.to_dict() | ||||
|         assert error.to_dict()["1st"] == "bad 1st" | ||||
|         error.errors = {'1st': ValidationError('bad 1st'), } | ||||
|         self.assertTrue('1st' in error.to_dict()) | ||||
|         self.assertEqual(error.to_dict()['1st'], 'bad 1st') | ||||
| 
 | ||||
|         # 2nd level error schema | ||||
|         error.errors = { | ||||
|             "1st": ValidationError( | ||||
|                 "bad 1st", errors={"2nd": ValidationError("bad 2nd")} | ||||
|             ) | ||||
|         } | ||||
|         assert "1st" in error.to_dict() | ||||
|         assert isinstance(error.to_dict()["1st"], dict) | ||||
|         assert "2nd" in error.to_dict()["1st"] | ||||
|         assert error.to_dict()["1st"]["2nd"] == "bad 2nd" | ||||
|         error.errors = {'1st': ValidationError('bad 1st', errors={ | ||||
|             '2nd': ValidationError('bad 2nd'), | ||||
|         })} | ||||
|         self.assertTrue('1st' in error.to_dict()) | ||||
|         self.assertTrue(isinstance(error.to_dict()['1st'], dict)) | ||||
|         self.assertTrue('2nd' in error.to_dict()['1st']) | ||||
|         self.assertEqual(error.to_dict()['1st']['2nd'], 'bad 2nd') | ||||
| 
 | ||||
|         # moar levels | ||||
|         error.errors = { | ||||
|             "1st": ValidationError( | ||||
|                 "bad 1st", | ||||
|                 errors={ | ||||
|                     "2nd": ValidationError( | ||||
|                         "bad 2nd", | ||||
|                         errors={ | ||||
|                             "3rd": ValidationError( | ||||
|                                 "bad 3rd", errors={"4th": ValidationError("Inception")} | ||||
|                             ) | ||||
|                         }, | ||||
|                     ) | ||||
|                 }, | ||||
|             ) | ||||
|         } | ||||
|         assert "1st" in error.to_dict() | ||||
|         assert "2nd" in error.to_dict()["1st"] | ||||
|         assert "3rd" in error.to_dict()["1st"]["2nd"] | ||||
|         assert "4th" in error.to_dict()["1st"]["2nd"]["3rd"] | ||||
|         assert error.to_dict()["1st"]["2nd"]["3rd"]["4th"] == "Inception" | ||||
|         error.errors = {'1st': ValidationError('bad 1st', errors={ | ||||
|             '2nd': ValidationError('bad 2nd', errors={ | ||||
|                 '3rd': ValidationError('bad 3rd', errors={ | ||||
|                     '4th': ValidationError('Inception'), | ||||
|                 }), | ||||
|             }), | ||||
|         })} | ||||
|         self.assertTrue('1st' in error.to_dict()) | ||||
|         self.assertTrue('2nd' in error.to_dict()['1st']) | ||||
|         self.assertTrue('3rd' in error.to_dict()['1st']['2nd']) | ||||
|         self.assertTrue('4th' in error.to_dict()['1st']['2nd']['3rd']) | ||||
|         self.assertEqual(error.to_dict()['1st']['2nd']['3rd']['4th'], | ||||
|                          'Inception') | ||||
| 
 | ||||
|         assert error.message == "root(2nd.3rd.4th.Inception: ['1st'])" | ||||
|         self.assertEqual(error.message, "root(2nd.3rd.4th.Inception: ['1st'])") | ||||
| 
 | ||||
|     def test_model_validation(self): | ||||
| 
 | ||||
|         class User(Document): | ||||
|             username = StringField(primary_key=True) | ||||
|             name = StringField(required=True) | ||||
| @@ -63,69 +58,67 @@ class TestValidatorError(MongoDBTestCase): | ||||
|         try: | ||||
|             User().validate() | ||||
|         except ValidationError as e: | ||||
|             assert "User:None" in e.message | ||||
|             assert e.to_dict() == { | ||||
|                 "username": "Field is required", | ||||
|                 "name": "Field is required", | ||||
|             } | ||||
|             self.assertTrue("User:None" in e.message) | ||||
|             self.assertEqual(e.to_dict(), { | ||||
|                 'username': 'Field is required', | ||||
|                 'name': 'Field is required'}) | ||||
| 
 | ||||
|         user = User(username="RossC0", name="Ross").save() | ||||
|         user.name = None | ||||
|         try: | ||||
|             user.save() | ||||
|         except ValidationError as e: | ||||
|             assert "User:RossC0" in e.message | ||||
|             assert e.to_dict() == {"name": "Field is required"} | ||||
|             self.assertTrue("User:RossC0" in e.message) | ||||
|             self.assertEqual(e.to_dict(), { | ||||
|                 'name': 'Field is required'}) | ||||
| 
 | ||||
|     def test_fields_rewrite(self): | ||||
|         class BasePerson(Document): | ||||
|             name = StringField() | ||||
|             age = IntField() | ||||
|             meta = {"abstract": True} | ||||
|             meta = {'abstract': True} | ||||
| 
 | ||||
|         class Person(BasePerson): | ||||
|             name = StringField(required=True) | ||||
| 
 | ||||
|         p = Person(age=15) | ||||
|         with pytest.raises(ValidationError): | ||||
|             p.validate() | ||||
|         self.assertRaises(ValidationError, p.validate) | ||||
| 
 | ||||
|     def test_embedded_document_validation(self): | ||||
|         """Ensure that embedded documents may be validated. | ||||
|         """ | ||||
| 
 | ||||
|         class Comment(EmbeddedDocument): | ||||
|             date = DateTimeField() | ||||
|             content = StringField(required=True) | ||||
| 
 | ||||
|         comment = Comment() | ||||
|         with pytest.raises(ValidationError): | ||||
|             comment.validate() | ||||
|         self.assertRaises(ValidationError, comment.validate) | ||||
| 
 | ||||
|         comment.content = "test" | ||||
|         comment.content = 'test' | ||||
|         comment.validate() | ||||
| 
 | ||||
|         comment.date = 4 | ||||
|         with pytest.raises(ValidationError): | ||||
|             comment.validate() | ||||
|         self.assertRaises(ValidationError, comment.validate) | ||||
| 
 | ||||
|         comment.date = datetime.now() | ||||
|         comment.validate() | ||||
|         assert comment._instance is None | ||||
|         self.assertEqual(comment._instance, None) | ||||
| 
 | ||||
|     def test_embedded_db_field_validate(self): | ||||
| 
 | ||||
|         class SubDoc(EmbeddedDocument): | ||||
|             val = IntField(required=True) | ||||
| 
 | ||||
|         class Doc(Document): | ||||
|             id = StringField(primary_key=True) | ||||
|             e = EmbeddedDocumentField(SubDoc, db_field="eb") | ||||
|             e = EmbeddedDocumentField(SubDoc, db_field='eb') | ||||
| 
 | ||||
|         try: | ||||
|             Doc(id="bad").validate() | ||||
|         except ValidationError as e: | ||||
|             assert "SubDoc:None" in e.message | ||||
|             assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}} | ||||
|             self.assertTrue("SubDoc:None" in e.message) | ||||
|             self.assertEqual(e.to_dict(), { | ||||
|                 "e": {'val': 'OK could not be converted to int'}}) | ||||
| 
 | ||||
|         Doc.drop_collection() | ||||
| 
 | ||||
| @@ -133,23 +126,25 @@ class TestValidatorError(MongoDBTestCase): | ||||
| 
 | ||||
|         doc = Doc.objects.first() | ||||
|         keys = doc._data.keys() | ||||
|         assert 2 == len(keys) | ||||
|         assert "e" in keys | ||||
|         assert "id" in keys | ||||
|         self.assertEqual(2, len(keys)) | ||||
|         self.assertTrue('e' in keys) | ||||
|         self.assertTrue('id' in keys) | ||||
| 
 | ||||
|         doc.e.val = "OK" | ||||
|         try: | ||||
|             doc.save() | ||||
|         except ValidationError as e: | ||||
|             assert "Doc:test" in e.message | ||||
|             assert e.to_dict() == {"e": {"val": "OK could not be converted to int"}} | ||||
|             self.assertTrue("Doc:test" in e.message) | ||||
|             self.assertEqual(e.to_dict(), { | ||||
|                 "e": {'val': 'OK could not be converted to int'}}) | ||||
| 
 | ||||
|     def test_embedded_weakref(self): | ||||
| 
 | ||||
|         class SubDoc(EmbeddedDocument): | ||||
|             val = IntField(required=True) | ||||
| 
 | ||||
|         class Doc(Document): | ||||
|             e = EmbeddedDocumentField(SubDoc, db_field="eb") | ||||
|             e = EmbeddedDocumentField(SubDoc, db_field='eb') | ||||
| 
 | ||||
|         Doc.drop_collection() | ||||
| 
 | ||||
| @@ -158,26 +153,23 @@ class TestValidatorError(MongoDBTestCase): | ||||
| 
 | ||||
|         s = SubDoc() | ||||
| 
 | ||||
|         with pytest.raises(ValidationError): | ||||
|             s.validate() | ||||
|         self.assertRaises(ValidationError, s.validate) | ||||
| 
 | ||||
|         d1.e = s | ||||
|         d2.e = s | ||||
| 
 | ||||
|         del d1 | ||||
| 
 | ||||
|         with pytest.raises(ValidationError): | ||||
|             d2.validate() | ||||
|         self.assertRaises(ValidationError, d2.validate) | ||||
| 
 | ||||
|     def test_parent_reference_in_child_document(self): | ||||
|         """ | ||||
|         Test to ensure a ReferenceField can store a reference to a parent | ||||
|         class when inherited. Issue #954. | ||||
|         """ | ||||
| 
 | ||||
|         class Parent(Document): | ||||
|             meta = {"allow_inheritance": True} | ||||
|             reference = ReferenceField("self") | ||||
|             meta = {'allow_inheritance': True} | ||||
|             reference = ReferenceField('self') | ||||
| 
 | ||||
|         class Child(Parent): | ||||
|             pass | ||||
| @@ -198,10 +190,9 @@ class TestValidatorError(MongoDBTestCase): | ||||
|         Test to ensure a ReferenceField can store a reference to a parent | ||||
|         class when inherited and when set via attribute. Issue #954. | ||||
|         """ | ||||
| 
 | ||||
|         class Parent(Document): | ||||
|             meta = {"allow_inheritance": True} | ||||
|             reference = ReferenceField("self") | ||||
|             meta = {'allow_inheritance': True} | ||||
|             reference = ReferenceField('self') | ||||
| 
 | ||||
|         class Child(Parent): | ||||
|             pass | ||||
| @@ -219,5 +210,5 @@ class TestValidatorError(MongoDBTestCase): | ||||
|             self.fail("ValidationError raised: %s" % e.message) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
| @@ -0,0 +1,3 @@ | ||||
| from fields import * | ||||
| from file_tests import * | ||||
| from geo import * | ||||
|   | ||||
							
								
								
									
										4580
									
								
								tests/fields/fields.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										4580
									
								
								tests/fields/fields.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,52 +1,39 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import copy | ||||
| import os | ||||
| import tempfile | ||||
| import unittest | ||||
| import tempfile | ||||
| 
 | ||||
| import gridfs | ||||
| import pytest | ||||
| import six | ||||
| 
 | ||||
| from nose.plugins.skip import SkipTest | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.python_support import StringIO | ||||
| 
 | ||||
| try: | ||||
|     from PIL import Image | ||||
| 
 | ||||
|     HAS_PIL = True | ||||
| except ImportError: | ||||
|     HAS_PIL = False | ||||
| 
 | ||||
| from tests.utils import MongoDBTestCase | ||||
| 
 | ||||
| require_pil = pytest.mark.skipif(not HAS_PIL, reason="PIL not installed") | ||||
| 
 | ||||
| TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), "mongoengine.png") | ||||
| TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), "mongodb_leaf.png") | ||||
| TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') | ||||
| TEST_IMAGE2_PATH = os.path.join(os.path.dirname(__file__), 'mongodb_leaf.png') | ||||
| 
 | ||||
| 
 | ||||
| def get_file(path): | ||||
|     """Use a BytesIO instead of a file to allow | ||||
|     to have a one-liner and avoid that the file remains opened""" | ||||
|     bytes_io = StringIO() | ||||
|     with open(path, "rb") as f: | ||||
|         bytes_io.write(f.read()) | ||||
|     bytes_io.seek(0) | ||||
|     return bytes_io | ||||
| class FileTest(MongoDBTestCase): | ||||
| 
 | ||||
| 
 | ||||
| class TestFileField(MongoDBTestCase): | ||||
|     def tearDown(self): | ||||
|         self.db.drop_collection("fs.files") | ||||
|         self.db.drop_collection("fs.chunks") | ||||
|         self.db.drop_collection('fs.files') | ||||
|         self.db.drop_collection('fs.chunks') | ||||
| 
 | ||||
|     def test_file_field_optional(self): | ||||
|         # Make sure FileField is optional and not required | ||||
|         class DemoFile(Document): | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         DemoFile.objects.create() | ||||
| 
 | ||||
|     def test_file_fields(self): | ||||
| @@ -58,21 +45,18 @@ class TestFileField(MongoDBTestCase): | ||||
| 
 | ||||
|         PutFile.drop_collection() | ||||
| 
 | ||||
|         text = six.b("Hello, World!") | ||||
|         content_type = "text/plain" | ||||
|         text = six.b('Hello, World!') | ||||
|         content_type = 'text/plain' | ||||
| 
 | ||||
|         putfile = PutFile() | ||||
|         putfile.the_file.put(text, content_type=content_type, filename="hello") | ||||
|         putfile.save() | ||||
| 
 | ||||
|         result = PutFile.objects.first() | ||||
|         assert putfile == result | ||||
|         assert ( | ||||
|             "%s" % result.the_file | ||||
|             == "<GridFSProxy: hello (%s)>" % result.the_file.grid_id | ||||
|         ) | ||||
|         assert result.the_file.read() == text | ||||
|         assert result.the_file.content_type == content_type | ||||
|         self.assertTrue(putfile == result) | ||||
|         self.assertEqual("%s" % result.the_file, "<GridFSProxy: hello>") | ||||
|         self.assertEqual(result.the_file.read(), text) | ||||
|         self.assertEqual(result.the_file.content_type, content_type) | ||||
|         result.the_file.delete()  # Remove file from GridFS | ||||
|         PutFile.objects.delete() | ||||
| 
 | ||||
| @@ -87,23 +71,22 @@ class TestFileField(MongoDBTestCase): | ||||
|         putfile.save() | ||||
| 
 | ||||
|         result = PutFile.objects.first() | ||||
|         assert putfile == result | ||||
|         assert result.the_file.read() == text | ||||
|         assert result.the_file.content_type == content_type | ||||
|         self.assertTrue(putfile == result) | ||||
|         self.assertEqual(result.the_file.read(), text) | ||||
|         self.assertEqual(result.the_file.content_type, content_type) | ||||
|         result.the_file.delete() | ||||
| 
 | ||||
|     def test_file_fields_stream(self): | ||||
|         """Ensure that file fields can be written to and their data retrieved | ||||
|         """ | ||||
| 
 | ||||
|         class StreamFile(Document): | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         StreamFile.drop_collection() | ||||
| 
 | ||||
|         text = six.b("Hello, World!") | ||||
|         more_text = six.b("Foo Bar") | ||||
|         content_type = "text/plain" | ||||
|         text = six.b('Hello, World!') | ||||
|         more_text = six.b('Foo Bar') | ||||
|         content_type = 'text/plain' | ||||
| 
 | ||||
|         streamfile = StreamFile() | ||||
|         streamfile.the_file.new_file(content_type=content_type) | ||||
| @@ -113,32 +96,32 @@ class TestFileField(MongoDBTestCase): | ||||
|         streamfile.save() | ||||
| 
 | ||||
|         result = StreamFile.objects.first() | ||||
|         assert streamfile == result | ||||
|         assert result.the_file.read() == text + more_text | ||||
|         assert result.the_file.content_type == content_type | ||||
|         self.assertTrue(streamfile == result) | ||||
|         self.assertEqual(result.the_file.read(), text + more_text) | ||||
|         self.assertEqual(result.the_file.content_type, content_type) | ||||
|         result.the_file.seek(0) | ||||
|         assert result.the_file.tell() == 0 | ||||
|         assert result.the_file.read(len(text)) == text | ||||
|         assert result.the_file.tell() == len(text) | ||||
|         assert result.the_file.read(len(more_text)) == more_text | ||||
|         assert result.the_file.tell() == len(text + more_text) | ||||
|         self.assertEqual(result.the_file.tell(), 0) | ||||
|         self.assertEqual(result.the_file.read(len(text)), text) | ||||
|         self.assertEqual(result.the_file.tell(), len(text)) | ||||
|         self.assertEqual(result.the_file.read(len(more_text)), more_text) | ||||
|         self.assertEqual(result.the_file.tell(), len(text + more_text)) | ||||
|         result.the_file.delete() | ||||
| 
 | ||||
|         # Ensure deleted file returns None | ||||
|         assert result.the_file.read() is None | ||||
|         self.assertTrue(result.the_file.read() is None) | ||||
| 
 | ||||
|     def test_file_fields_stream_after_none(self): | ||||
|         """Ensure that a file field can be written to after it has been saved as | ||||
|         None | ||||
|         """ | ||||
| 
 | ||||
|         class StreamFile(Document): | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         StreamFile.drop_collection() | ||||
| 
 | ||||
|         text = six.b("Hello, World!") | ||||
|         more_text = six.b("Foo Bar") | ||||
|         text = six.b('Hello, World!') | ||||
|         more_text = six.b('Foo Bar') | ||||
|         content_type = 'text/plain' | ||||
| 
 | ||||
|         streamfile = StreamFile() | ||||
|         streamfile.save() | ||||
| @@ -149,26 +132,27 @@ class TestFileField(MongoDBTestCase): | ||||
|         streamfile.save() | ||||
| 
 | ||||
|         result = StreamFile.objects.first() | ||||
|         assert streamfile == result | ||||
|         assert result.the_file.read() == text + more_text | ||||
|         # assert result.the_file.content_type == content_type | ||||
|         self.assertTrue(streamfile == result) | ||||
|         self.assertEqual(result.the_file.read(), text + more_text) | ||||
|         # self.assertEqual(result.the_file.content_type, content_type) | ||||
|         result.the_file.seek(0) | ||||
|         assert result.the_file.tell() == 0 | ||||
|         assert result.the_file.read(len(text)) == text | ||||
|         assert result.the_file.tell() == len(text) | ||||
|         assert result.the_file.read(len(more_text)) == more_text | ||||
|         assert result.the_file.tell() == len(text + more_text) | ||||
|         self.assertEqual(result.the_file.tell(), 0) | ||||
|         self.assertEqual(result.the_file.read(len(text)), text) | ||||
|         self.assertEqual(result.the_file.tell(), len(text)) | ||||
|         self.assertEqual(result.the_file.read(len(more_text)), more_text) | ||||
|         self.assertEqual(result.the_file.tell(), len(text + more_text)) | ||||
|         result.the_file.delete() | ||||
| 
 | ||||
|         # Ensure deleted file returns None | ||||
|         assert result.the_file.read() is None | ||||
|         self.assertTrue(result.the_file.read() is None) | ||||
| 
 | ||||
|     def test_file_fields_set(self): | ||||
| 
 | ||||
|         class SetFile(Document): | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         text = six.b("Hello, World!") | ||||
|         more_text = six.b("Foo Bar") | ||||
|         text = six.b('Hello, World!') | ||||
|         more_text = six.b('Foo Bar') | ||||
| 
 | ||||
|         SetFile.drop_collection() | ||||
| 
 | ||||
| @@ -177,19 +161,20 @@ class TestFileField(MongoDBTestCase): | ||||
|         setfile.save() | ||||
| 
 | ||||
|         result = SetFile.objects.first() | ||||
|         assert setfile == result | ||||
|         assert result.the_file.read() == text | ||||
|         self.assertTrue(setfile == result) | ||||
|         self.assertEqual(result.the_file.read(), text) | ||||
| 
 | ||||
|         # Try replacing file with new one | ||||
|         result.the_file.replace(more_text) | ||||
|         result.save() | ||||
| 
 | ||||
|         result = SetFile.objects.first() | ||||
|         assert setfile == result | ||||
|         assert result.the_file.read() == more_text | ||||
|         self.assertTrue(setfile == result) | ||||
|         self.assertEqual(result.the_file.read(), more_text) | ||||
|         result.the_file.delete() | ||||
| 
 | ||||
|     def test_file_field_no_default(self): | ||||
| 
 | ||||
|         class GridDocument(Document): | ||||
|             the_file = FileField() | ||||
| 
 | ||||
| @@ -204,35 +189,34 @@ class TestFileField(MongoDBTestCase): | ||||
|             doc_a.save() | ||||
| 
 | ||||
|             doc_b = GridDocument.objects.with_id(doc_a.id) | ||||
|             doc_b.the_file.replace(f, filename="doc_b") | ||||
|             doc_b.the_file.replace(f, filename='doc_b') | ||||
|             doc_b.save() | ||||
|             assert doc_b.the_file.grid_id is not None | ||||
|             self.assertNotEqual(doc_b.the_file.grid_id, None) | ||||
| 
 | ||||
|             # Test it matches | ||||
|             doc_c = GridDocument.objects.with_id(doc_b.id) | ||||
|             assert doc_b.the_file.grid_id == doc_c.the_file.grid_id | ||||
|             self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id) | ||||
| 
 | ||||
|             # Test with default | ||||
|             doc_d = GridDocument(the_file=six.b("")) | ||||
|             doc_d = GridDocument(the_file=six.b('')) | ||||
|             doc_d.save() | ||||
| 
 | ||||
|             doc_e = GridDocument.objects.with_id(doc_d.id) | ||||
|             assert doc_d.the_file.grid_id == doc_e.the_file.grid_id | ||||
|             self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id) | ||||
| 
 | ||||
|             doc_e.the_file.replace(f, filename="doc_e") | ||||
|             doc_e.the_file.replace(f, filename='doc_e') | ||||
|             doc_e.save() | ||||
| 
 | ||||
|             doc_f = GridDocument.objects.with_id(doc_e.id) | ||||
|             assert doc_e.the_file.grid_id == doc_f.the_file.grid_id | ||||
|             self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id) | ||||
| 
 | ||||
|         db = GridDocument._get_db() | ||||
|         grid_fs = gridfs.GridFS(db) | ||||
|         assert ["doc_b", "doc_e"] == grid_fs.list() | ||||
|         self.assertEqual(['doc_b', 'doc_e'], grid_fs.list()) | ||||
| 
 | ||||
|     def test_file_uniqueness(self): | ||||
|         """Ensure that each instance of a FileField is unique | ||||
|         """ | ||||
| 
 | ||||
|         class TestFile(Document): | ||||
|             name = StringField() | ||||
|             the_file = FileField() | ||||
| @@ -240,15 +224,15 @@ class TestFileField(MongoDBTestCase): | ||||
|         # First instance | ||||
|         test_file = TestFile() | ||||
|         test_file.name = "Hello, World!" | ||||
|         test_file.the_file.put(six.b("Hello, World!")) | ||||
|         test_file.the_file.put(six.b('Hello, World!')) | ||||
|         test_file.save() | ||||
| 
 | ||||
|         # Second instance | ||||
|         test_file_dupe = TestFile() | ||||
|         data = test_file_dupe.the_file.read()  # Should be None | ||||
| 
 | ||||
|         assert test_file.name != test_file_dupe.name | ||||
|         assert test_file.the_file.read() != data | ||||
|         self.assertTrue(test_file.name != test_file_dupe.name) | ||||
|         self.assertTrue(test_file.the_file.read() != data) | ||||
| 
 | ||||
|         TestFile.drop_collection() | ||||
| 
 | ||||
| @@ -261,66 +245,61 @@ class TestFileField(MongoDBTestCase): | ||||
|             photo = FileField() | ||||
| 
 | ||||
|         Animal.drop_collection() | ||||
|         marmot = Animal(genus="Marmota", family="Sciuridae") | ||||
|         marmot = Animal(genus='Marmota', family='Sciuridae') | ||||
| 
 | ||||
|         marmot_photo_content = get_file(TEST_IMAGE_PATH)  # Retrieve a photo from disk | ||||
|         marmot.photo.put(marmot_photo_content, content_type="image/jpeg", foo="bar") | ||||
|         marmot_photo = open(TEST_IMAGE_PATH, 'rb')  # Retrieve a photo from disk | ||||
|         marmot.photo.put(marmot_photo, content_type='image/jpeg', foo='bar') | ||||
|         marmot.photo.close() | ||||
|         marmot.save() | ||||
| 
 | ||||
|         marmot = Animal.objects.get() | ||||
|         assert marmot.photo.content_type == "image/jpeg" | ||||
|         assert marmot.photo.foo == "bar" | ||||
|         self.assertEqual(marmot.photo.content_type, 'image/jpeg') | ||||
|         self.assertEqual(marmot.photo.foo, 'bar') | ||||
| 
 | ||||
|     def test_file_reassigning(self): | ||||
|         class TestFile(Document): | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         TestFile.drop_collection() | ||||
| 
 | ||||
|         test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() | ||||
|         assert test_file.the_file.get().length == 8313 | ||||
|         test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save() | ||||
|         self.assertEqual(test_file.the_file.get().length, 8313) | ||||
| 
 | ||||
|         test_file = TestFile.objects.first() | ||||
|         test_file.the_file = get_file(TEST_IMAGE2_PATH) | ||||
|         test_file.the_file = open(TEST_IMAGE2_PATH, 'rb') | ||||
|         test_file.save() | ||||
|         assert test_file.the_file.get().length == 4971 | ||||
|         self.assertEqual(test_file.the_file.get().length, 4971) | ||||
| 
 | ||||
|     def test_file_boolean(self): | ||||
|         """Ensure that a boolean test of a FileField indicates its presence | ||||
|         """ | ||||
| 
 | ||||
|         class TestFile(Document): | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         TestFile.drop_collection() | ||||
| 
 | ||||
|         test_file = TestFile() | ||||
|         assert not bool(test_file.the_file) | ||||
|         test_file.the_file.put(six.b("Hello, World!"), content_type="text/plain") | ||||
|         self.assertFalse(bool(test_file.the_file)) | ||||
|         test_file.the_file.put(six.b('Hello, World!'), content_type='text/plain') | ||||
|         test_file.save() | ||||
|         assert bool(test_file.the_file) | ||||
|         self.assertTrue(bool(test_file.the_file)) | ||||
| 
 | ||||
|         test_file = TestFile.objects.first() | ||||
|         assert test_file.the_file.content_type == "text/plain" | ||||
|         self.assertEqual(test_file.the_file.content_type, "text/plain") | ||||
| 
 | ||||
|     def test_file_cmp(self): | ||||
|         """Test comparing against other types""" | ||||
| 
 | ||||
|         class TestFile(Document): | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         test_file = TestFile() | ||||
|         assert test_file.the_file not in [{"test": 1}] | ||||
|         self.assertFalse(test_file.the_file in [{"test": 1}]) | ||||
| 
 | ||||
|     def test_file_disk_space(self): | ||||
|         """ Test disk space usage when we delete/replace a file """ | ||||
| 
 | ||||
|         class TestFile(Document): | ||||
|             the_file = FileField() | ||||
| 
 | ||||
|         text = six.b("Hello, World!") | ||||
|         content_type = "text/plain" | ||||
|         text = six.b('Hello, World!') | ||||
|         content_type = 'text/plain' | ||||
| 
 | ||||
|         testfile = TestFile() | ||||
|         testfile.the_file.put(text, content_type=content_type, filename="hello") | ||||
| @@ -331,16 +310,16 @@ class TestFileField(MongoDBTestCase): | ||||
| 
 | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         assert len(list(files)) == 1 | ||||
|         assert len(list(chunks)) == 1 | ||||
|         self.assertEquals(len(list(files)), 1) | ||||
|         self.assertEquals(len(list(chunks)), 1) | ||||
| 
 | ||||
|         # Deleting the docoument should delete the files | ||||
|         testfile.delete() | ||||
| 
 | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         assert len(list(files)) == 0 | ||||
|         assert len(list(chunks)) == 0 | ||||
|         self.assertEquals(len(list(files)), 0) | ||||
|         self.assertEquals(len(list(chunks)), 0) | ||||
| 
 | ||||
|         # Test case where we don't store a file in the first place | ||||
|         testfile = TestFile() | ||||
| @@ -348,39 +327,41 @@ class TestFileField(MongoDBTestCase): | ||||
| 
 | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         assert len(list(files)) == 0 | ||||
|         assert len(list(chunks)) == 0 | ||||
|         self.assertEquals(len(list(files)), 0) | ||||
|         self.assertEquals(len(list(chunks)), 0) | ||||
| 
 | ||||
|         testfile.delete() | ||||
| 
 | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         assert len(list(files)) == 0 | ||||
|         assert len(list(chunks)) == 0 | ||||
|         self.assertEquals(len(list(files)), 0) | ||||
|         self.assertEquals(len(list(chunks)), 0) | ||||
| 
 | ||||
|         # Test case where we overwrite the file | ||||
|         testfile = TestFile() | ||||
|         testfile.the_file.put(text, content_type=content_type, filename="hello") | ||||
|         testfile.save() | ||||
| 
 | ||||
|         text = six.b("Bonjour, World!") | ||||
|         text = six.b('Bonjour, World!') | ||||
|         testfile.the_file.replace(text, content_type=content_type, filename="hello") | ||||
|         testfile.save() | ||||
| 
 | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         assert len(list(files)) == 1 | ||||
|         assert len(list(chunks)) == 1 | ||||
|         self.assertEquals(len(list(files)), 1) | ||||
|         self.assertEquals(len(list(chunks)), 1) | ||||
| 
 | ||||
|         testfile.delete() | ||||
| 
 | ||||
|         files = db.fs.files.find() | ||||
|         chunks = db.fs.chunks.find() | ||||
|         assert len(list(files)) == 0 | ||||
|         assert len(list(chunks)) == 0 | ||||
|         self.assertEquals(len(list(files)), 0) | ||||
|         self.assertEquals(len(list(chunks)), 0) | ||||
| 
 | ||||
|     @require_pil | ||||
|     def test_image_field(self): | ||||
|         if not HAS_PIL: | ||||
|             raise SkipTest('PIL not installed') | ||||
| 
 | ||||
|         class TestImage(Document): | ||||
|             image = ImageField() | ||||
| 
 | ||||
| @@ -395,104 +376,112 @@ class TestFileField(MongoDBTestCase): | ||||
|                 t.image.put(f) | ||||
|                 self.fail("Should have raised an invalidation error") | ||||
|             except ValidationError as e: | ||||
|                 assert "%s" % e == "Invalid image: cannot identify image file %s" % f | ||||
|                 self.assertEqual("%s" % e, "Invalid image: cannot identify image file %s" % f) | ||||
| 
 | ||||
|         t = TestImage() | ||||
|         t.image.put(get_file(TEST_IMAGE_PATH)) | ||||
|         t.image.put(open(TEST_IMAGE_PATH, 'rb')) | ||||
|         t.save() | ||||
| 
 | ||||
|         t = TestImage.objects.first() | ||||
| 
 | ||||
|         assert t.image.format == "PNG" | ||||
|         self.assertEqual(t.image.format, 'PNG') | ||||
| 
 | ||||
|         w, h = t.image.size | ||||
|         assert w == 371 | ||||
|         assert h == 76 | ||||
|         self.assertEqual(w, 371) | ||||
|         self.assertEqual(h, 76) | ||||
| 
 | ||||
|         t.image.delete() | ||||
| 
 | ||||
|     @require_pil | ||||
|     def test_image_field_reassigning(self): | ||||
|         if not HAS_PIL: | ||||
|             raise SkipTest('PIL not installed') | ||||
| 
 | ||||
|         class TestFile(Document): | ||||
|             the_file = ImageField() | ||||
| 
 | ||||
|         TestFile.drop_collection() | ||||
| 
 | ||||
|         test_file = TestFile(the_file=get_file(TEST_IMAGE_PATH)).save() | ||||
|         assert test_file.the_file.size == (371, 76) | ||||
|         test_file = TestFile(the_file=open(TEST_IMAGE_PATH, 'rb')).save() | ||||
|         self.assertEqual(test_file.the_file.size, (371, 76)) | ||||
| 
 | ||||
|         test_file = TestFile.objects.first() | ||||
|         test_file.the_file = get_file(TEST_IMAGE2_PATH) | ||||
|         test_file.the_file = open(TEST_IMAGE2_PATH, 'rb') | ||||
|         test_file.save() | ||||
|         assert test_file.the_file.size == (45, 101) | ||||
|         self.assertEqual(test_file.the_file.size, (45, 101)) | ||||
| 
 | ||||
|     @require_pil | ||||
|     def test_image_field_resize(self): | ||||
|         if not HAS_PIL: | ||||
|             raise SkipTest('PIL not installed') | ||||
| 
 | ||||
|         class TestImage(Document): | ||||
|             image = ImageField(size=(185, 37)) | ||||
| 
 | ||||
|         TestImage.drop_collection() | ||||
| 
 | ||||
|         t = TestImage() | ||||
|         t.image.put(get_file(TEST_IMAGE_PATH)) | ||||
|         t.image.put(open(TEST_IMAGE_PATH, 'rb')) | ||||
|         t.save() | ||||
| 
 | ||||
|         t = TestImage.objects.first() | ||||
| 
 | ||||
|         assert t.image.format == "PNG" | ||||
|         self.assertEqual(t.image.format, 'PNG') | ||||
|         w, h = t.image.size | ||||
| 
 | ||||
|         assert w == 185 | ||||
|         assert h == 37 | ||||
|         self.assertEqual(w, 185) | ||||
|         self.assertEqual(h, 37) | ||||
| 
 | ||||
|         t.image.delete() | ||||
| 
 | ||||
|     @require_pil | ||||
|     def test_image_field_resize_force(self): | ||||
|         if not HAS_PIL: | ||||
|             raise SkipTest('PIL not installed') | ||||
| 
 | ||||
|         class TestImage(Document): | ||||
|             image = ImageField(size=(185, 37, True)) | ||||
| 
 | ||||
|         TestImage.drop_collection() | ||||
| 
 | ||||
|         t = TestImage() | ||||
|         t.image.put(get_file(TEST_IMAGE_PATH)) | ||||
|         t.image.put(open(TEST_IMAGE_PATH, 'rb')) | ||||
|         t.save() | ||||
| 
 | ||||
|         t = TestImage.objects.first() | ||||
| 
 | ||||
|         assert t.image.format == "PNG" | ||||
|         self.assertEqual(t.image.format, 'PNG') | ||||
|         w, h = t.image.size | ||||
| 
 | ||||
|         assert w == 185 | ||||
|         assert h == 37 | ||||
|         self.assertEqual(w, 185) | ||||
|         self.assertEqual(h, 37) | ||||
| 
 | ||||
|         t.image.delete() | ||||
| 
 | ||||
|     @require_pil | ||||
|     def test_image_field_thumbnail(self): | ||||
|         if not HAS_PIL: | ||||
|             raise SkipTest('PIL not installed') | ||||
| 
 | ||||
|         class TestImage(Document): | ||||
|             image = ImageField(thumbnail_size=(92, 18)) | ||||
| 
 | ||||
|         TestImage.drop_collection() | ||||
| 
 | ||||
|         t = TestImage() | ||||
|         t.image.put(get_file(TEST_IMAGE_PATH)) | ||||
|         t.image.put(open(TEST_IMAGE_PATH, 'rb')) | ||||
|         t.save() | ||||
| 
 | ||||
|         t = TestImage.objects.first() | ||||
| 
 | ||||
|         assert t.image.thumbnail.format == "PNG" | ||||
|         assert t.image.thumbnail.width == 92 | ||||
|         assert t.image.thumbnail.height == 18 | ||||
|         self.assertEqual(t.image.thumbnail.format, 'PNG') | ||||
|         self.assertEqual(t.image.thumbnail.width, 92) | ||||
|         self.assertEqual(t.image.thumbnail.height, 18) | ||||
| 
 | ||||
|         t.image.delete() | ||||
| 
 | ||||
|     def test_file_multidb(self): | ||||
|         register_connection("test_files", "test_files") | ||||
|         register_connection('test_files', 'test_files') | ||||
| 
 | ||||
|         class TestFile(Document): | ||||
|             name = StringField() | ||||
|             the_file = FileField(db_alias="test_files", collection_name="macumba") | ||||
|             the_file = FileField(db_alias="test_files", | ||||
|                                  collection_name="macumba") | ||||
| 
 | ||||
|         TestFile.drop_collection() | ||||
| 
 | ||||
| @@ -503,21 +492,23 @@ class TestFileField(MongoDBTestCase): | ||||
|         # First instance | ||||
|         test_file = TestFile() | ||||
|         test_file.name = "Hello, World!" | ||||
|         test_file.the_file.put(six.b("Hello, World!"), name="hello.txt") | ||||
|         test_file.the_file.put(six.b('Hello, World!'), | ||||
|                           name="hello.txt") | ||||
|         test_file.save() | ||||
| 
 | ||||
|         data = get_db("test_files").macumba.files.find_one() | ||||
|         assert data.get("name") == "hello.txt" | ||||
|         self.assertEqual(data.get('name'), 'hello.txt') | ||||
| 
 | ||||
|         test_file = TestFile.objects.first() | ||||
|         assert test_file.the_file.read() == six.b("Hello, World!") | ||||
|         self.assertEqual(test_file.the_file.read(), six.b('Hello, World!')) | ||||
| 
 | ||||
|         test_file = TestFile.objects.first() | ||||
|         test_file.the_file = six.b("HELLO, WORLD!") | ||||
|         test_file.the_file = six.b('HELLO, WORLD!') | ||||
|         test_file.save() | ||||
| 
 | ||||
|         test_file = TestFile.objects.first() | ||||
|         assert test_file.the_file.read() == six.b("HELLO, WORLD!") | ||||
|         self.assertEqual(test_file.the_file.read(), | ||||
|                          six.b('HELLO, WORLD!')) | ||||
| 
 | ||||
|     def test_copyable(self): | ||||
|         class PutFile(Document): | ||||
| @@ -525,8 +516,8 @@ class TestFileField(MongoDBTestCase): | ||||
| 
 | ||||
|         PutFile.drop_collection() | ||||
| 
 | ||||
|         text = six.b("Hello, World!") | ||||
|         content_type = "text/plain" | ||||
|         text = six.b('Hello, World!') | ||||
|         content_type = 'text/plain' | ||||
| 
 | ||||
|         putfile = PutFile() | ||||
|         putfile.the_file.put(text, content_type=content_type) | ||||
| @@ -535,11 +526,14 @@ class TestFileField(MongoDBTestCase): | ||||
|         class TestFile(Document): | ||||
|             name = StringField() | ||||
| 
 | ||||
|         assert putfile == copy.copy(putfile) | ||||
|         assert putfile == copy.deepcopy(putfile) | ||||
|         self.assertEqual(putfile, copy.copy(putfile)) | ||||
|         self.assertEqual(putfile, copy.deepcopy(putfile)) | ||||
| 
 | ||||
|     @require_pil | ||||
|     def test_get_image_by_grid_id(self): | ||||
| 
 | ||||
|         if not HAS_PIL: | ||||
|             raise SkipTest('PIL not installed') | ||||
| 
 | ||||
|         class TestImage(Document): | ||||
| 
 | ||||
|             image1 = ImageField() | ||||
| @@ -548,14 +542,15 @@ class TestFileField(MongoDBTestCase): | ||||
|         TestImage.drop_collection() | ||||
| 
 | ||||
|         t = TestImage() | ||||
|         t.image1.put(get_file(TEST_IMAGE_PATH)) | ||||
|         t.image2.put(get_file(TEST_IMAGE2_PATH)) | ||||
|         t.image1.put(open(TEST_IMAGE_PATH, 'rb')) | ||||
|         t.image2.put(open(TEST_IMAGE2_PATH, 'rb')) | ||||
|         t.save() | ||||
| 
 | ||||
|         test = TestImage.objects.first() | ||||
|         grid_id = test.image1.grid_id | ||||
| 
 | ||||
|         assert 1 == TestImage.objects(Q(image1=grid_id) or Q(image2=grid_id)).count() | ||||
|         self.assertEqual(1, TestImage.objects(Q(image1=grid_id) | ||||
|                                               or Q(image2=grid_id)).count()) | ||||
| 
 | ||||
|     def test_complex_field_filefield(self): | ||||
|         """Ensure you can add meta data to file""" | ||||
| @@ -566,21 +561,22 @@ class TestFileField(MongoDBTestCase): | ||||
|             photos = ListField(FileField()) | ||||
| 
 | ||||
|         Animal.drop_collection() | ||||
|         marmot = Animal(genus="Marmota", family="Sciuridae") | ||||
|         marmot = Animal(genus='Marmota', family='Sciuridae') | ||||
| 
 | ||||
|         with open(TEST_IMAGE_PATH, "rb") as marmot_photo:  # Retrieve a photo from disk | ||||
|             photos_field = marmot._fields["photos"].field | ||||
|             new_proxy = photos_field.get_proxy_obj("photos", marmot) | ||||
|             new_proxy.put(marmot_photo, content_type="image/jpeg", foo="bar") | ||||
|         marmot_photo = open(TEST_IMAGE_PATH, 'rb')  # Retrieve a photo from disk | ||||
| 
 | ||||
|         photos_field = marmot._fields['photos'].field | ||||
|         new_proxy = photos_field.get_proxy_obj('photos', marmot) | ||||
|         new_proxy.put(marmot_photo, content_type='image/jpeg', foo='bar') | ||||
|         marmot_photo.close() | ||||
| 
 | ||||
|         marmot.photos.append(new_proxy) | ||||
|         marmot.save() | ||||
| 
 | ||||
|         marmot = Animal.objects.get() | ||||
|         assert marmot.photos[0].content_type == "image/jpeg" | ||||
|         assert marmot.photos[0].foo == "bar" | ||||
|         assert marmot.photos[0].get().length == 8313 | ||||
|         self.assertEqual(marmot.photos[0].content_type, 'image/jpeg') | ||||
|         self.assertEqual(marmot.photos[0].foo, 'bar') | ||||
|         self.assertEqual(marmot.photos[0].get().length, 8313) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
| @@ -2,23 +2,30 @@ | ||||
| import unittest | ||||
| 
 | ||||
| from mongoengine import * | ||||
| from tests.utils import MongoDBTestCase | ||||
| from mongoengine.connection import get_db | ||||
| 
 | ||||
| __all__ = ("GeoFieldTest", ) | ||||
| 
 | ||||
| 
 | ||||
| class TestGeoField(MongoDBTestCase): | ||||
| class GeoFieldTest(unittest.TestCase): | ||||
| 
 | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|         self.db = get_db() | ||||
| 
 | ||||
|     def _test_for_expected_error(self, Cls, loc, expected): | ||||
|         try: | ||||
|             Cls(loc=loc).validate() | ||||
|             self.fail("Should not validate the location {0}".format(loc)) | ||||
|             self.fail('Should not validate the location {0}'.format(loc)) | ||||
|         except ValidationError as e: | ||||
|             assert expected == e.to_dict()["loc"] | ||||
|             self.assertEqual(expected, e.to_dict()['loc']) | ||||
| 
 | ||||
|     def test_geopoint_validation(self): | ||||
|         class Location(Document): | ||||
|             loc = GeoPointField() | ||||
| 
 | ||||
|         invalid_coords = [{"x": 1, "y": 2}, 5, "a"] | ||||
|         expected = "GeoPointField can only accept tuples or lists of (x, y)" | ||||
|         expected = 'GeoPointField can only accept tuples or lists of (x, y)' | ||||
| 
 | ||||
|         for coord in invalid_coords: | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
| @@ -33,19 +40,12 @@ class TestGeoField(MongoDBTestCase): | ||||
|             expected = "Both values (%s) in point must be float or int" % repr(coord) | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
| 
 | ||||
|         invalid_coords = [21, 4, "a"] | ||||
|         for coord in invalid_coords: | ||||
|             expected = "GeoPointField can only accept tuples or lists of (x, y)" | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
| 
 | ||||
|     def test_point_validation(self): | ||||
|         class Location(Document): | ||||
|             loc = PointField() | ||||
| 
 | ||||
|         invalid_coords = {"x": 1, "y": 2} | ||||
|         expected = ( | ||||
|             "PointField can only accept a valid GeoJson dictionary or lists of (x, y)" | ||||
|         ) | ||||
|         expected = 'PointField can only accept a valid GeoJson dictionary or lists of (x, y)' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = {"type": "MadeUp", "coordinates": []} | ||||
| @@ -72,16 +72,19 @@ class TestGeoField(MongoDBTestCase): | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
| 
 | ||||
|         Location(loc=[1, 2]).validate() | ||||
|         Location( | ||||
|             loc={"type": "Point", "coordinates": [81.4471435546875, 23.61432859499169]} | ||||
|         ).validate() | ||||
|         Location(loc={ | ||||
|             "type": "Point", | ||||
|             "coordinates": [ | ||||
|               81.4471435546875, | ||||
|               23.61432859499169 | ||||
|             ]}).validate() | ||||
| 
 | ||||
|     def test_linestring_validation(self): | ||||
|         class Location(Document): | ||||
|             loc = LineStringField() | ||||
| 
 | ||||
|         invalid_coords = {"x": 1, "y": 2} | ||||
|         expected = "LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)" | ||||
|         expected = 'LineStringField can only accept a valid GeoJson dictionary or lists of (x, y)' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||
| @@ -89,9 +92,7 @@ class TestGeoField(MongoDBTestCase): | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = {"type": "LineString", "coordinates": [[1, 2, 3]]} | ||||
|         expected = ( | ||||
|             "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||
|         ) | ||||
|         expected = "Invalid LineString:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = [5, "a"] | ||||
| @@ -99,25 +100,16 @@ class TestGeoField(MongoDBTestCase): | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = [[1]] | ||||
|         expected = ( | ||||
|             "Invalid LineString:\nValue (%s) must be a two-dimensional point" | ||||
|             % repr(invalid_coords[0]) | ||||
|         ) | ||||
|         expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0]) | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = [[1, 2, 3]] | ||||
|         expected = ( | ||||
|             "Invalid LineString:\nValue (%s) must be a two-dimensional point" | ||||
|             % repr(invalid_coords[0]) | ||||
|         ) | ||||
|         expected = "Invalid LineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0]) | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = [[[{}, {}]], [("a", "b")]] | ||||
|         for coord in invalid_coords: | ||||
|             expected = ( | ||||
|                 "Invalid LineString:\nBoth values (%s) in point must be float or int" | ||||
|                 % repr(coord[0]) | ||||
|             ) | ||||
|             expected = "Invalid LineString:\nBoth values (%s) in point must be float or int" % repr(coord[0]) | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
| 
 | ||||
|         Location(loc=[[1, 2], [3, 4], [5, 6], [1, 2]]).validate() | ||||
| @@ -127,9 +119,7 @@ class TestGeoField(MongoDBTestCase): | ||||
|             loc = PolygonField() | ||||
| 
 | ||||
|         invalid_coords = {"x": 1, "y": 2} | ||||
|         expected = ( | ||||
|             "PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)" | ||||
|         ) | ||||
|         expected = 'PolygonField can only accept a valid GeoJson dictionary or lists of (x, y)' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||
| @@ -141,9 +131,7 @@ class TestGeoField(MongoDBTestCase): | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = [[[5, "a"]]] | ||||
|         expected = ( | ||||
|             "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int" | ||||
|         ) | ||||
|         expected = "Invalid Polygon:\nBoth values ([5, 'a']) in point must be float or int" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = [[[]]] | ||||
| @@ -169,7 +157,7 @@ class TestGeoField(MongoDBTestCase): | ||||
|             loc = MultiPointField() | ||||
| 
 | ||||
|         invalid_coords = {"x": 1, "y": 2} | ||||
|         expected = "MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)" | ||||
|         expected = 'MultiPointField can only accept a valid GeoJson dictionary or lists of (x, y)' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||
| @@ -195,19 +183,19 @@ class TestGeoField(MongoDBTestCase): | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
| 
 | ||||
|         Location(loc=[[1, 2]]).validate() | ||||
|         Location( | ||||
|             loc={ | ||||
|                 "type": "MultiPoint", | ||||
|                 "coordinates": [[1, 2], [81.4471435546875, 23.61432859499169]], | ||||
|             } | ||||
|         ).validate() | ||||
|         Location(loc={ | ||||
|             "type": "MultiPoint", | ||||
|             "coordinates": [ | ||||
|                 [1, 2], | ||||
|                 [81.4471435546875, 23.61432859499169] | ||||
|             ]}).validate() | ||||
| 
 | ||||
|     def test_multilinestring_validation(self): | ||||
|         class Location(Document): | ||||
|             loc = MultiLineStringField() | ||||
| 
 | ||||
|         invalid_coords = {"x": 1, "y": 2} | ||||
|         expected = "MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)" | ||||
|         expected = 'MultiLineStringField can only accept a valid GeoJson dictionary or lists of (x, y)' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||
| @@ -223,25 +211,16 @@ class TestGeoField(MongoDBTestCase): | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = [[[1]]] | ||||
|         expected = ( | ||||
|             "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" | ||||
|             % repr(invalid_coords[0][0]) | ||||
|         ) | ||||
|         expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0]) | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = [[[1, 2, 3]]] | ||||
|         expected = ( | ||||
|             "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" | ||||
|             % repr(invalid_coords[0][0]) | ||||
|         ) | ||||
|         expected = "Invalid MultiLineString:\nValue (%s) must be a two-dimensional point" % repr(invalid_coords[0][0]) | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = [[[[{}, {}]]], [[("a", "b")]]] | ||||
|         for coord in invalid_coords: | ||||
|             expected = ( | ||||
|                 "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" | ||||
|                 % repr(coord[0][0]) | ||||
|             ) | ||||
|             expected = "Invalid MultiLineString:\nBoth values (%s) in point must be float or int" % repr(coord[0][0]) | ||||
|             self._test_for_expected_error(Location, coord, expected) | ||||
| 
 | ||||
|         Location(loc=[[[1, 2], [3, 4], [5, 6], [1, 2]]]).validate() | ||||
| @@ -251,7 +230,7 @@ class TestGeoField(MongoDBTestCase): | ||||
|             loc = MultiPolygonField() | ||||
| 
 | ||||
|         invalid_coords = {"x": 1, "y": 2} | ||||
|         expected = "MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)" | ||||
|         expected = 'MultiPolygonField can only accept a valid GeoJson dictionary or lists of (x, y)' | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = {"type": "MadeUp", "coordinates": [[]]} | ||||
| @@ -259,9 +238,7 @@ class TestGeoField(MongoDBTestCase): | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = {"type": "MultiPolygon", "coordinates": [[[[1, 2, 3]]]]} | ||||
|         expected = ( | ||||
|             "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||
|         ) | ||||
|         expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = [[[[5, "a"]]]] | ||||
| @@ -273,9 +250,7 @@ class TestGeoField(MongoDBTestCase): | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = [[[[1, 2, 3]]]] | ||||
|         expected = ( | ||||
|             "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||
|         ) | ||||
|         expected = "Invalid MultiPolygon:\nValue ([1, 2, 3]) must be a two-dimensional point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = [[[[{}, {}]]], [[("a", "b")]]] | ||||
| @@ -283,9 +258,7 @@ class TestGeoField(MongoDBTestCase): | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         invalid_coords = [[[[1, 2], [3, 4]]]] | ||||
|         expected = ( | ||||
|             "Invalid MultiPolygon:\nLineStrings must start and end at the same point" | ||||
|         ) | ||||
|         expected = "Invalid MultiPolygon:\nLineStrings must start and end at the same point" | ||||
|         self._test_for_expected_error(Location, invalid_coords, expected) | ||||
| 
 | ||||
|         Location(loc=[[[[1, 2], [3, 4], [5, 6], [1, 2]]]]).validate() | ||||
| @@ -293,19 +266,17 @@ class TestGeoField(MongoDBTestCase): | ||||
|     def test_indexes_geopoint(self): | ||||
|         """Ensure that indexes are created automatically for GeoPointFields. | ||||
|         """ | ||||
| 
 | ||||
|         class Event(Document): | ||||
|             title = StringField() | ||||
|             location = GeoPointField() | ||||
| 
 | ||||
|         geo_indicies = Event._geo_indices() | ||||
|         assert geo_indicies == [{"fields": [("location", "2d")]}] | ||||
|         self.assertEqual(geo_indicies, [{'fields': [('location', '2d')]}]) | ||||
| 
 | ||||
|     def test_geopoint_embedded_indexes(self): | ||||
|         """Ensure that indexes are created automatically for GeoPointFields on | ||||
|         embedded documents. | ||||
|         """ | ||||
| 
 | ||||
|         class Venue(EmbeddedDocument): | ||||
|             location = GeoPointField() | ||||
|             name = StringField() | ||||
| @@ -315,12 +286,11 @@ class TestGeoField(MongoDBTestCase): | ||||
|             venue = EmbeddedDocumentField(Venue) | ||||
| 
 | ||||
|         geo_indicies = Event._geo_indices() | ||||
|         assert geo_indicies == [{"fields": [("venue.location", "2d")]}] | ||||
|         self.assertEqual(geo_indicies, [{'fields': [('venue.location', '2d')]}]) | ||||
| 
 | ||||
|     def test_indexes_2dsphere(self): | ||||
|         """Ensure that indexes are created automatically for GeoPointFields. | ||||
|         """ | ||||
| 
 | ||||
|         class Event(Document): | ||||
|             title = StringField() | ||||
|             point = PointField() | ||||
| @@ -328,14 +298,13 @@ class TestGeoField(MongoDBTestCase): | ||||
|             polygon = PolygonField() | ||||
| 
 | ||||
|         geo_indicies = Event._geo_indices() | ||||
|         assert {"fields": [("line", "2dsphere")]} in geo_indicies | ||||
|         assert {"fields": [("polygon", "2dsphere")]} in geo_indicies | ||||
|         assert {"fields": [("point", "2dsphere")]} in geo_indicies | ||||
|         self.assertTrue({'fields': [('line', '2dsphere')]} in geo_indicies) | ||||
|         self.assertTrue({'fields': [('polygon', '2dsphere')]} in geo_indicies) | ||||
|         self.assertTrue({'fields': [('point', '2dsphere')]} in geo_indicies) | ||||
| 
 | ||||
|     def test_indexes_2dsphere_embedded(self): | ||||
|         """Ensure that indexes are created automatically for GeoPointFields. | ||||
|         """ | ||||
| 
 | ||||
|         class Venue(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|             point = PointField() | ||||
| @@ -347,11 +316,12 @@ class TestGeoField(MongoDBTestCase): | ||||
|             venue = EmbeddedDocumentField(Venue) | ||||
| 
 | ||||
|         geo_indicies = Event._geo_indices() | ||||
|         assert {"fields": [("venue.line", "2dsphere")]} in geo_indicies | ||||
|         assert {"fields": [("venue.polygon", "2dsphere")]} in geo_indicies | ||||
|         assert {"fields": [("venue.point", "2dsphere")]} in geo_indicies | ||||
|         self.assertTrue({'fields': [('venue.line', '2dsphere')]} in geo_indicies) | ||||
|         self.assertTrue({'fields': [('venue.polygon', '2dsphere')]} in geo_indicies) | ||||
|         self.assertTrue({'fields': [('venue.point', '2dsphere')]} in geo_indicies) | ||||
| 
 | ||||
|     def test_geo_indexes_recursion(self): | ||||
| 
 | ||||
|         class Location(Document): | ||||
|             name = StringField() | ||||
|             location = GeoPointField() | ||||
| @@ -363,14 +333,14 @@ class TestGeoField(MongoDBTestCase): | ||||
|         Location.drop_collection() | ||||
|         Parent.drop_collection() | ||||
| 
 | ||||
|         Parent(name="Berlin").save() | ||||
|         Parent(name='Berlin').save() | ||||
|         info = Parent._get_collection().index_information() | ||||
|         assert "location_2d" not in info | ||||
|         self.assertFalse('location_2d' in info) | ||||
|         info = Location._get_collection().index_information() | ||||
|         assert "location_2d" in info | ||||
|         self.assertTrue('location_2d' in info) | ||||
| 
 | ||||
|         assert len(Parent._geo_indices()) == 0 | ||||
|         assert len(Location._geo_indices()) == 1 | ||||
|         self.assertEqual(len(Parent._geo_indices()), 0) | ||||
|         self.assertEqual(len(Location._geo_indices()), 1) | ||||
| 
 | ||||
|     def test_geo_indexes_auto_index(self): | ||||
| 
 | ||||
| @@ -379,18 +349,18 @@ class TestGeoField(MongoDBTestCase): | ||||
|             location = PointField(auto_index=False) | ||||
|             datetime = DateTimeField() | ||||
| 
 | ||||
|             meta = {"indexes": [[("location", "2dsphere"), ("datetime", 1)]]} | ||||
|             meta = { | ||||
|                 'indexes': [[("location", "2dsphere"), ("datetime", 1)]] | ||||
|             } | ||||
| 
 | ||||
|         assert [] == Log._geo_indices() | ||||
|         self.assertEqual([], Log._geo_indices()) | ||||
| 
 | ||||
|         Log.drop_collection() | ||||
|         Log.ensure_indexes() | ||||
| 
 | ||||
|         info = Log._get_collection().index_information() | ||||
|         assert info["location_2dsphere_datetime_1"]["key"] == [ | ||||
|             ("location", "2dsphere"), | ||||
|             ("datetime", 1), | ||||
|         ] | ||||
|         self.assertEqual(info["location_2dsphere_datetime_1"]["key"], | ||||
|                          [('location', '2dsphere'), ('datetime', 1)]) | ||||
| 
 | ||||
|         # Test listing explicitly | ||||
|         class Log(Document): | ||||
| @@ -398,20 +368,20 @@ class TestGeoField(MongoDBTestCase): | ||||
|             datetime = DateTimeField() | ||||
| 
 | ||||
|             meta = { | ||||
|                 "indexes": [{"fields": [("location", "2dsphere"), ("datetime", 1)]}] | ||||
|                 'indexes': [ | ||||
|                     {'fields': [("location", "2dsphere"), ("datetime", 1)]} | ||||
|                 ] | ||||
|             } | ||||
| 
 | ||||
|         assert [] == Log._geo_indices() | ||||
|         self.assertEqual([], Log._geo_indices()) | ||||
| 
 | ||||
|         Log.drop_collection() | ||||
|         Log.ensure_indexes() | ||||
| 
 | ||||
|         info = Log._get_collection().index_information() | ||||
|         assert info["location_2dsphere_datetime_1"]["key"] == [ | ||||
|             ("location", "2dsphere"), | ||||
|             ("datetime", 1), | ||||
|         ] | ||||
|         self.assertEqual(info["location_2dsphere_datetime_1"]["key"], | ||||
|                          [('location', '2dsphere'), ('datetime', 1)]) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
| @@ -1,150 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import uuid | ||||
|  | ||||
| from bson import Binary | ||||
| import pytest | ||||
| import six | ||||
|  | ||||
| from mongoengine import * | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
| BIN_VALUE = six.b( | ||||
|     "\xa9\xf3\x8d(\xd7\x03\x84\xb4k[\x0f\xe3\xa2\x19\x85p[J\xa3\xd2>\xde\xe6\x87\xb1\x7f\xc6\xe6\xd9r\x18\xf5" | ||||
| ) | ||||
|  | ||||
|  | ||||
| class TestBinaryField(MongoDBTestCase): | ||||
|     def test_binary_fields(self): | ||||
|         """Ensure that binary fields can be stored and retrieved. | ||||
|         """ | ||||
|  | ||||
|         class Attachment(Document): | ||||
|             content_type = StringField() | ||||
|             blob = BinaryField() | ||||
|  | ||||
|         BLOB = six.b("\xe6\x00\xc4\xff\x07") | ||||
|         MIME_TYPE = "application/octet-stream" | ||||
|  | ||||
|         Attachment.drop_collection() | ||||
|  | ||||
|         attachment = Attachment(content_type=MIME_TYPE, blob=BLOB) | ||||
|         attachment.save() | ||||
|  | ||||
|         attachment_1 = Attachment.objects().first() | ||||
|         assert MIME_TYPE == attachment_1.content_type | ||||
|         assert BLOB == six.binary_type(attachment_1.blob) | ||||
|  | ||||
|     def test_validation_succeeds(self): | ||||
|         """Ensure that valid values can be assigned to binary fields. | ||||
|         """ | ||||
|  | ||||
|         class AttachmentRequired(Document): | ||||
|             blob = BinaryField(required=True) | ||||
|  | ||||
|         class AttachmentSizeLimit(Document): | ||||
|             blob = BinaryField(max_bytes=4) | ||||
|  | ||||
|         attachment_required = AttachmentRequired() | ||||
|         with pytest.raises(ValidationError): | ||||
|             attachment_required.validate() | ||||
|         attachment_required.blob = Binary(six.b("\xe6\x00\xc4\xff\x07")) | ||||
|         attachment_required.validate() | ||||
|  | ||||
|         _5_BYTES = six.b("\xe6\x00\xc4\xff\x07") | ||||
|         _4_BYTES = six.b("\xe6\x00\xc4\xff") | ||||
|         with pytest.raises(ValidationError): | ||||
|             AttachmentSizeLimit(blob=_5_BYTES).validate() | ||||
|         AttachmentSizeLimit(blob=_4_BYTES).validate() | ||||
|  | ||||
|     def test_validation_fails(self): | ||||
|         """Ensure that invalid values cannot be assigned to binary fields.""" | ||||
|  | ||||
|         class Attachment(Document): | ||||
|             blob = BinaryField() | ||||
|  | ||||
|         for invalid_data in (2, u"Im_a_unicode", ["some_str"]): | ||||
|             with pytest.raises(ValidationError): | ||||
|                 Attachment(blob=invalid_data).validate() | ||||
|  | ||||
|     def test__primary(self): | ||||
|         class Attachment(Document): | ||||
|             id = BinaryField(primary_key=True) | ||||
|  | ||||
|         Attachment.drop_collection() | ||||
|         binary_id = uuid.uuid4().bytes | ||||
|         att = Attachment(id=binary_id).save() | ||||
|         assert 1 == Attachment.objects.count() | ||||
|         assert 1 == Attachment.objects.filter(id=att.id).count() | ||||
|         att.delete() | ||||
|         assert 0 == Attachment.objects.count() | ||||
|  | ||||
|     def test_primary_filter_by_binary_pk_as_str(self): | ||||
|         class Attachment(Document): | ||||
|             id = BinaryField(primary_key=True) | ||||
|  | ||||
|         Attachment.drop_collection() | ||||
|         binary_id = uuid.uuid4().bytes | ||||
|         att = Attachment(id=binary_id).save() | ||||
|         assert 1 == Attachment.objects.filter(id=binary_id).count() | ||||
|         att.delete() | ||||
|         assert 0 == Attachment.objects.count() | ||||
|  | ||||
|     def test_match_querying_with_bytes(self): | ||||
|         class MyDocument(Document): | ||||
|             bin_field = BinaryField() | ||||
|  | ||||
|         MyDocument.drop_collection() | ||||
|  | ||||
|         doc = MyDocument(bin_field=BIN_VALUE).save() | ||||
|         matched_doc = MyDocument.objects(bin_field=BIN_VALUE).first() | ||||
|         assert matched_doc.id == doc.id | ||||
|  | ||||
|     def test_match_querying_with_binary(self): | ||||
|         class MyDocument(Document): | ||||
|             bin_field = BinaryField() | ||||
|  | ||||
|         MyDocument.drop_collection() | ||||
|  | ||||
|         doc = MyDocument(bin_field=BIN_VALUE).save() | ||||
|  | ||||
|         matched_doc = MyDocument.objects(bin_field=Binary(BIN_VALUE)).first() | ||||
|         assert matched_doc.id == doc.id | ||||
|  | ||||
|     def test_modify_operation__set(self): | ||||
|         """Ensures no regression of bug #1127""" | ||||
|  | ||||
|         class MyDocument(Document): | ||||
|             some_field = StringField() | ||||
|             bin_field = BinaryField() | ||||
|  | ||||
|         MyDocument.drop_collection() | ||||
|  | ||||
|         doc = MyDocument.objects(some_field="test").modify( | ||||
|             upsert=True, new=True, set__bin_field=BIN_VALUE | ||||
|         ) | ||||
|         assert doc.some_field == "test" | ||||
|         if six.PY3: | ||||
|             assert doc.bin_field == BIN_VALUE | ||||
|         else: | ||||
|             assert doc.bin_field == Binary(BIN_VALUE) | ||||
|  | ||||
|     def test_update_one(self): | ||||
|         """Ensures no regression of bug #1127""" | ||||
|  | ||||
|         class MyDocument(Document): | ||||
|             bin_field = BinaryField() | ||||
|  | ||||
|         MyDocument.drop_collection() | ||||
|  | ||||
|         bin_data = six.b("\xe6\x00\xc4\xff\x07") | ||||
|         doc = MyDocument(bin_field=bin_data).save() | ||||
|  | ||||
|         n_updated = MyDocument.objects(bin_field=bin_data).update_one( | ||||
|             bin_field=BIN_VALUE | ||||
|         ) | ||||
|         assert n_updated == 1 | ||||
|         fetched = MyDocument.objects.with_id(doc.id) | ||||
|         if six.PY3: | ||||
|             assert fetched.bin_field == BIN_VALUE | ||||
|         else: | ||||
|             assert fetched.bin_field == Binary(BIN_VALUE) | ||||
| @@ -1,52 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import pytest | ||||
|  | ||||
| from mongoengine import * | ||||
| from tests.utils import MongoDBTestCase, get_as_pymongo | ||||
|  | ||||
|  | ||||
| class TestBooleanField(MongoDBTestCase): | ||||
|     def test_storage(self): | ||||
|         class Person(Document): | ||||
|             admin = BooleanField() | ||||
|  | ||||
|         person = Person(admin=True) | ||||
|         person.save() | ||||
|         assert get_as_pymongo(person) == {"_id": person.id, "admin": True} | ||||
|  | ||||
|     def test_validation(self): | ||||
|         """Ensure that invalid values cannot be assigned to boolean | ||||
|         fields. | ||||
|         """ | ||||
|  | ||||
|         class Person(Document): | ||||
|             admin = BooleanField() | ||||
|  | ||||
|         person = Person() | ||||
|         person.admin = True | ||||
|         person.validate() | ||||
|  | ||||
|         person.admin = 2 | ||||
|         with pytest.raises(ValidationError): | ||||
|             person.validate() | ||||
|         person.admin = "Yes" | ||||
|         with pytest.raises(ValidationError): | ||||
|             person.validate() | ||||
|         person.admin = "False" | ||||
|         with pytest.raises(ValidationError): | ||||
|             person.validate() | ||||
|  | ||||
|     def test_weirdness_constructor(self): | ||||
|         """When attribute is set in contructor, it gets cast into a bool | ||||
|         which causes some weird behavior. We dont necessarily want to maintain this behavior | ||||
|         but its a known issue | ||||
|         """ | ||||
|  | ||||
|         class Person(Document): | ||||
|             admin = BooleanField() | ||||
|  | ||||
|         new_person = Person(admin="False") | ||||
|         assert new_person.admin | ||||
|  | ||||
|         new_person = Person(admin="0") | ||||
|         assert new_person.admin | ||||
| @@ -1,377 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| from decimal import Decimal | ||||
|  | ||||
| import pytest | ||||
|  | ||||
| from mongoengine import * | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
| class TestCachedReferenceField(MongoDBTestCase): | ||||
|     def test_get_and_save(self): | ||||
|         """ | ||||
|         Tests #1047: CachedReferenceField creates DBRefs on to_python, | ||||
|         but can't save them on to_mongo. | ||||
|         """ | ||||
|  | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             tag = StringField() | ||||
|  | ||||
|         class Ocorrence(Document): | ||||
|             person = StringField() | ||||
|             animal = CachedReferenceField(Animal) | ||||
|  | ||||
|         Animal.drop_collection() | ||||
|         Ocorrence.drop_collection() | ||||
|  | ||||
|         Ocorrence( | ||||
|             person="testte", animal=Animal(name="Leopard", tag="heavy").save() | ||||
|         ).save() | ||||
|         p = Ocorrence.objects.get() | ||||
|         p.person = "new_testte" | ||||
|         p.save() | ||||
|  | ||||
|     def test_general_things(self): | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             tag = StringField() | ||||
|  | ||||
|         class Ocorrence(Document): | ||||
|             person = StringField() | ||||
|             animal = CachedReferenceField(Animal, fields=["tag"]) | ||||
|  | ||||
|         Animal.drop_collection() | ||||
|         Ocorrence.drop_collection() | ||||
|  | ||||
|         a = Animal(name="Leopard", tag="heavy") | ||||
|         a.save() | ||||
|  | ||||
|         assert Animal._cached_reference_fields == [Ocorrence.animal] | ||||
|         o = Ocorrence(person="teste", animal=a) | ||||
|         o.save() | ||||
|  | ||||
|         p = Ocorrence(person="Wilson") | ||||
|         p.save() | ||||
|  | ||||
|         assert Ocorrence.objects(animal=None).count() == 1 | ||||
|  | ||||
|         assert a.to_mongo(fields=["tag"]) == {"tag": "heavy", "_id": a.pk} | ||||
|  | ||||
|         assert o.to_mongo()["animal"]["tag"] == "heavy" | ||||
|  | ||||
|         # counts | ||||
|         Ocorrence(person="teste 2").save() | ||||
|         Ocorrence(person="teste 3").save() | ||||
|  | ||||
|         count = Ocorrence.objects(animal__tag="heavy").count() | ||||
|         assert count == 1 | ||||
|  | ||||
|         ocorrence = Ocorrence.objects(animal__tag="heavy").first() | ||||
|         assert ocorrence.person == "teste" | ||||
|         assert isinstance(ocorrence.animal, Animal) | ||||
|  | ||||
|     def test_with_decimal(self): | ||||
|         class PersonAuto(Document): | ||||
|             name = StringField() | ||||
|             salary = DecimalField() | ||||
|  | ||||
|         class SocialTest(Document): | ||||
|             group = StringField() | ||||
|             person = CachedReferenceField(PersonAuto, fields=("salary",)) | ||||
|  | ||||
|         PersonAuto.drop_collection() | ||||
|         SocialTest.drop_collection() | ||||
|  | ||||
|         p = PersonAuto(name="Alberto", salary=Decimal("7000.00")) | ||||
|         p.save() | ||||
|  | ||||
|         s = SocialTest(group="dev", person=p) | ||||
|         s.save() | ||||
|  | ||||
|         assert SocialTest.objects._collection.find_one({"person.salary": 7000.00}) == { | ||||
|             "_id": s.pk, | ||||
|             "group": s.group, | ||||
|             "person": {"_id": p.pk, "salary": 7000.00}, | ||||
|         } | ||||
|  | ||||
|     def test_cached_reference_field_reference(self): | ||||
|         class Group(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             group = ReferenceField(Group) | ||||
|  | ||||
|         class SocialData(Document): | ||||
|             obs = StringField() | ||||
|             tags = ListField(StringField()) | ||||
|             person = CachedReferenceField(Person, fields=("group",)) | ||||
|  | ||||
|         Group.drop_collection() | ||||
|         Person.drop_collection() | ||||
|         SocialData.drop_collection() | ||||
|  | ||||
|         g1 = Group(name="dev") | ||||
|         g1.save() | ||||
|  | ||||
|         g2 = Group(name="designers") | ||||
|         g2.save() | ||||
|  | ||||
|         p1 = Person(name="Alberto", group=g1) | ||||
|         p1.save() | ||||
|  | ||||
|         p2 = Person(name="Andre", group=g1) | ||||
|         p2.save() | ||||
|  | ||||
|         p3 = Person(name="Afro design", group=g2) | ||||
|         p3.save() | ||||
|  | ||||
|         s1 = SocialData(obs="testing 123", person=p1, tags=["tag1", "tag2"]) | ||||
|         s1.save() | ||||
|  | ||||
|         s2 = SocialData(obs="testing 321", person=p3, tags=["tag3", "tag4"]) | ||||
|         s2.save() | ||||
|  | ||||
|         assert SocialData.objects._collection.find_one({"tags": "tag2"}) == { | ||||
|             "_id": s1.pk, | ||||
|             "obs": "testing 123", | ||||
|             "tags": ["tag1", "tag2"], | ||||
|             "person": {"_id": p1.pk, "group": g1.pk}, | ||||
|         } | ||||
|  | ||||
|         assert SocialData.objects(person__group=g2).count() == 1 | ||||
|         assert SocialData.objects(person__group=g2).first() == s2 | ||||
|  | ||||
|     def test_cached_reference_field_push_with_fields(self): | ||||
|         class Product(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         Product.drop_collection() | ||||
|  | ||||
|         class Basket(Document): | ||||
|             products = ListField(CachedReferenceField(Product, fields=["name"])) | ||||
|  | ||||
|         Basket.drop_collection() | ||||
|         product1 = Product(name="abc").save() | ||||
|         product2 = Product(name="def").save() | ||||
|         basket = Basket(products=[product1]).save() | ||||
|         assert Basket.objects._collection.find_one() == { | ||||
|             "_id": basket.pk, | ||||
|             "products": [{"_id": product1.pk, "name": product1.name}], | ||||
|         } | ||||
|         # push to list | ||||
|         basket.update(push__products=product2) | ||||
|         basket.reload() | ||||
|         assert Basket.objects._collection.find_one() == { | ||||
|             "_id": basket.pk, | ||||
|             "products": [ | ||||
|                 {"_id": product1.pk, "name": product1.name}, | ||||
|                 {"_id": product2.pk, "name": product2.name}, | ||||
|             ], | ||||
|         } | ||||
|  | ||||
    def test_cached_reference_field_update_all(self):
        """sync_all() must refresh the cached copies held by referencing
        documents after a queryset-level (bulk) update.
        """

        class Person(Document):
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(choices=TYPES)
            father = CachedReferenceField("self", fields=("tp",))

        Person.drop_collection()

        a1 = Person(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Person(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        # The cached copy carries the projected field "tp" of the father.
        a2 = Person.objects.with_id(a2.id)
        assert a2.father.tp == a1.tp

        assert dict(a2.to_mongo()) == {
            "_id": a2.pk,
            "name": u"Wilson Junior",
            "tp": u"pf",
            "father": {"_id": a1.pk, "tp": u"pj"},
        }

        # Querying by the reference matches on the embedded _id only.
        assert Person.objects(father=a1)._query == {"father._id": a1.pk}
        assert Person.objects(father=a1).count() == 1

        # A bulk update leaves the cached copies stale (father still "pj");
        # sync_all() rewrites them across the whole collection.
        Person.objects.update(set__tp="pf")
        Person.father.sync_all()

        a2.reload()
        assert dict(a2.to_mongo()) == {
            "_id": a2.pk,
            "name": u"Wilson Junior",
            "tp": u"pf",
            "father": {"_id": a1.pk, "tp": u"pf"},
        }
|  | ||||
|     def test_cached_reference_fields_on_embedded_documents(self): | ||||
|         with pytest.raises(InvalidDocumentError): | ||||
|  | ||||
|             class Test(Document): | ||||
|                 name = StringField() | ||||
|  | ||||
|             type( | ||||
|                 "WrongEmbeddedDocument", | ||||
|                 (EmbeddedDocument,), | ||||
|                 {"test": CachedReferenceField(Test)}, | ||||
|             ) | ||||
|  | ||||
|     def test_cached_reference_auto_sync(self): | ||||
|         class Person(Document): | ||||
|             TYPES = (("pf", "PF"), ("pj", "PJ")) | ||||
|             name = StringField() | ||||
|             tp = StringField(choices=TYPES) | ||||
|  | ||||
|             father = CachedReferenceField("self", fields=("tp",)) | ||||
|  | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         a1 = Person(name="Wilson Father", tp="pj") | ||||
|         a1.save() | ||||
|  | ||||
|         a2 = Person(name="Wilson Junior", tp="pf", father=a1) | ||||
|         a2.save() | ||||
|  | ||||
|         a1.tp = "pf" | ||||
|         a1.save() | ||||
|  | ||||
|         a2.reload() | ||||
|         assert dict(a2.to_mongo()) == { | ||||
|             "_id": a2.pk, | ||||
|             "name": "Wilson Junior", | ||||
|             "tp": "pf", | ||||
|             "father": {"_id": a1.pk, "tp": "pf"}, | ||||
|         } | ||||
|  | ||||
    def test_cached_reference_auto_sync_disabled(self):
        """With auto_sync=False, saving the referenced document must NOT
        refresh the cached copy stored in referencing documents.
        """

        class Persone(Document):
            TYPES = (("pf", "PF"), ("pj", "PJ"))
            name = StringField()
            tp = StringField(choices=TYPES)

            father = CachedReferenceField("self", fields=("tp",), auto_sync=False)

        Persone.drop_collection()

        a1 = Persone(name="Wilson Father", tp="pj")
        a1.save()

        a2 = Persone(name="Wilson Junior", tp="pf", father=a1)
        a2.save()

        # The father's change is saved, but no sync is propagated.
        a1.tp = "pf"
        a1.save()

        # The raw document still holds the stale cached value "pj".
        assert Persone.objects._collection.find_one({"_id": a2.pk}) == {
            "_id": a2.pk,
            "name": "Wilson Junior",
            "tp": "pf",
            "father": {"_id": a1.pk, "tp": "pj"},
        }
|  | ||||
    def test_cached_reference_embedded_fields(self):
        """Cached fields may use dotted paths into an EmbeddedDocument;
        the cached copy honours custom db_field names (``tp`` -> ``t``).
        """

        class Owner(EmbeddedDocument):
            TPS = (("n", "Normal"), ("u", "Urgent"))
            name = StringField()
            tp = StringField(verbose_name="Type", db_field="t", choices=TPS)

        class Animal(Document):
            name = StringField()
            tag = StringField()

            owner = EmbeddedDocumentField(Owner)

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal, fields=["tag", "owner.tp"])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(
            name="Leopard", tag="heavy", owner=Owner(tp="u", name="Wilson Júnior")
        )
        a.save()

        o = Ocorrence(person="teste", animal=a)
        o.save()
        # to_mongo(fields=...) projects only the requested (dotted) fields;
        # note the embedded field is stored under its db_field name "t".
        assert dict(a.to_mongo(fields=["tag", "owner.tp"])) == {
            "_id": a.pk,
            "tag": "heavy",
            "owner": {"t": "u"},
        }
        assert o.to_mongo()["animal"]["tag"] == "heavy"
        assert o.to_mongo()["animal"]["owner"]["t"] == "u"

        # Check to_mongo with fields: un-projected fields are omitted entirely.
        assert "animal" not in o.to_mongo(fields=["person"])

        # counts: two extra occurrences with no cached animal at all
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        # Only the occurrence with a cached animal matches the dotted query.
        count = Ocorrence.objects(animal__tag="heavy", animal__owner__tp="u").count()
        assert count == 1

        ocorrence = Ocorrence.objects(
            animal__tag="heavy", animal__owner__tp="u"
        ).first()
        assert ocorrence.person == "teste"
        # Dereferencing the cached field yields a full Animal instance.
        assert isinstance(ocorrence.animal, Animal)
|  | ||||
    def test_cached_reference_embedded_list_fields(self):
        """Dotted cached fields may also point at a ListField inside an
        EmbeddedDocument; the whole list is cached on the reference.
        """

        class Owner(EmbeddedDocument):
            name = StringField()
            tags = ListField(StringField())

        class Animal(Document):
            name = StringField()
            tag = StringField()

            owner = EmbeddedDocumentField(Owner)

        class Ocorrence(Document):
            person = StringField()
            animal = CachedReferenceField(Animal, fields=["tag", "owner.tags"])

        Animal.drop_collection()
        Ocorrence.drop_collection()

        a = Animal(
            name="Leopard",
            tag="heavy",
            owner=Owner(tags=["cool", "funny"], name="Wilson Júnior"),
        )
        a.save()

        o = Ocorrence(person="teste 2", animal=a)
        o.save()
        # Projection keeps only the requested fields (plus _id); the owner's
        # name is not cached because it was not listed in `fields`.
        assert dict(a.to_mongo(fields=["tag", "owner.tags"])) == {
            "_id": a.pk,
            "tag": "heavy",
            "owner": {"tags": ["cool", "funny"]},
        }

        assert o.to_mongo()["animal"]["tag"] == "heavy"
        assert o.to_mongo()["animal"]["owner"]["tags"] == ["cool", "funny"]

        # counts: two extra occurrences with no cached animal at all
        Ocorrence(person="teste 2").save()
        Ocorrence(person="teste 3").save()

        # Dotted lookups translate into dotted keys on the cached document.
        query = Ocorrence.objects(
            animal__tag="heavy", animal__owner__tags="cool"
        )._query
        assert query == {"animal.owner.tags": "cool", "animal.tag": "heavy"}

        ocorrence = Ocorrence.objects(
            animal__tag="heavy", animal__owner__tags="cool"
        ).first()
        assert ocorrence.person == "teste 2"
        assert isinstance(ocorrence.animal, Animal)
| @@ -1,193 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import datetime | ||||
| import itertools | ||||
| import math | ||||
| import re | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class ComplexDateTimeFieldTest(MongoDBTestCase):
    """Tests for ComplexDateTimeField, which stores datetimes as strings
    and therefore preserves full microsecond precision (unlike BSON dates).
    """

    def test_complexdatetime_storage(self):
        """Tests for complex datetime fields - which can handle
        microseconds without rounding.
        """

        class LogEntry(Document):
            date = ComplexDateTimeField()
            date_with_dots = ComplexDateTimeField(separator=".")

        LogEntry.drop_collection()

        # Post UTC - microseconds are rounded (down) nearest millisecond and
        # dropped - with default datetimefields
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1

        # Post UTC - microseconds are rounded (down) nearest millisecond - with
        # default datetimefields
        d1 = datetime.datetime(1970, 1, 1, 0, 0, 1, 9999)
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1

        # Pre UTC dates microseconds below 1000 are dropped - with default
        # datetimefields
        d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
        log.date = d1
        log.save()
        log.reload()
        assert log.date == d1

        # Pre UTC microseconds above 1000 is wonky - with default datetimefields
        # log.date has an invalid microsecond value so I can't construct
        # a date to compare.
        for i in range(1001, 3113, 33):
            d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i)
            log.date = d1
            log.save()
            log.reload()
            assert log.date == d1
            log1 = LogEntry.objects.get(date=d1)
            assert log == log1

        # Test string padding: every component of the stored string must be
        # zero-padded to a fixed width so lexicographic order == time order.
        microsecond = map(int, [math.pow(10, x) for x in range(6)])
        mm = dd = hh = ii = ss = [1, 10]

        for values in itertools.product([2014], mm, dd, hh, ii, ss, microsecond):
            stored = LogEntry(date=datetime.datetime(*values)).to_mongo()["date"]
            # FIX: raw string — "\d" in a plain string is an invalid escape
            # sequence (DeprecationWarning since Python 3.6).
            assert (
                re.match(r"^\d{4},\d{2},\d{2},\d{2},\d{2},\d{2},\d{6}$", stored)
                is not None
            )

        # Test separator
        stored = LogEntry(date_with_dots=datetime.datetime(2014, 1, 1)).to_mongo()[
            "date_with_dots"
        ]
        # FIX: the dots must be escaped — an unescaped "." matches ANY
        # character, so the custom separator was never actually verified.
        assert (
            re.match(r"^\d{4}\.\d{2}\.\d{2}\.\d{2}\.\d{2}\.\d{2}\.\d{6}$", stored)
            is not None
        )

    def test_complexdatetime_usage(self):
        """Tests for complex datetime fields - which can handle
        microseconds without rounding.
        """

        class LogEntry(Document):
            date = ComplexDateTimeField()

        LogEntry.drop_collection()

        d1 = datetime.datetime(1950, 1, 1, 0, 0, 1, 999)
        log = LogEntry()
        log.date = d1
        log.save()

        # Exact microsecond-precision lookup works.
        log1 = LogEntry.objects.get(date=d1)
        assert log == log1

        # create extra 59 log entries for a total of 60
        for i in range(1951, 2010):
            d = datetime.datetime(i, 1, 1, 0, 0, 1, 999)
            LogEntry(date=d).save()

        assert LogEntry.objects.count() == 60

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 59:
            assert logs[i].date <= logs[i + 1].date
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 59:
            assert logs[i].date >= logs[i + 1].date
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        assert logs.count() == 30

        logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1))
        assert logs.count() == 30

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2011, 1, 1),
            date__gte=datetime.datetime(2000, 1, 1),
        )
        assert logs.count() == 10

        LogEntry.drop_collection()

        # Test microsecond-level ordering/filtering
        for microsecond in (99, 999, 9999, 10000):
            LogEntry(date=datetime.datetime(2015, 1, 1, 0, 0, 0, microsecond)).save()

        logs = list(LogEntry.objects.order_by("date"))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            assert log.date < next_log.date

        logs = list(LogEntry.objects.order_by("-date"))
        for next_idx, log in enumerate(logs[:-1], start=1):
            next_log = logs[next_idx]
            assert log.date > next_log.date

        logs = LogEntry.objects.filter(
            date__lte=datetime.datetime(2015, 1, 1, 0, 0, 0, 10000)
        )
        assert logs.count() == 4

    def test_no_default_value(self):
        """Without a default, the field stays None through save/fetch."""

        class Log(Document):
            timestamp = ComplexDateTimeField()

        Log.drop_collection()

        log = Log()
        assert log.timestamp is None
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        assert fetched_log.timestamp is None

    def test_default_static_value(self):
        """A static default value round-trips unchanged."""
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=NOW)

        Log.drop_collection()

        log = Log()
        assert log.timestamp == NOW
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        assert fetched_log.timestamp == NOW

    def test_default_callable(self):
        """A callable default is evaluated at document creation time."""
        NOW = datetime.datetime.utcnow()

        class Log(Document):
            timestamp = ComplexDateTimeField(default=datetime.datetime.utcnow)

        Log.drop_collection()

        log = Log()
        # The default was produced after NOW was captured.
        assert log.timestamp >= NOW
        log.save()

        fetched_log = Log.objects.with_id(log.id)
        assert fetched_log.timestamp >= NOW
| @@ -1,176 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import datetime | ||||
|  | ||||
| import pytest | ||||
| import six | ||||
|  | ||||
| try: | ||||
|     import dateutil | ||||
| except ImportError: | ||||
|     dateutil = None | ||||
|  | ||||
| from mongoengine import * | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class TestDateField(MongoDBTestCase):
    """Behavioural tests for DateField: parsing, truncation, querying."""

    def test_date_from_empty_string(self):
        """
        Ensure an exception is raised when trying to
        cast an empty string to datetime.
        """

        class MyDoc(Document):
            dt = DateField()

        doc = MyDoc(dt="")
        with pytest.raises(ValidationError):
            doc.save()

    def test_date_from_whitespace_string(self):
        """
        Ensure an exception is raised when trying to
        cast a whitespace-only string to datetime.
        """

        class MyDoc(Document):
            dt = DateField()

        doc = MyDoc(dt="   ")
        with pytest.raises(ValidationError):
            doc.save()

    def test_default_values_today(self):
        """Ensure that default field values are used when creating
        a document.
        """

        class Person(Document):
            day = DateField(default=datetime.date.today)

        person = Person()
        person.validate()
        # Repeated attribute access yields a stable value.
        assert person.day == person.day
        assert person.day == datetime.date.today()
        assert person._data["day"] == person.day

    def test_date(self):
        """Datetimes assigned to a DateField are truncated to the date part.

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """

        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        # A plain date round-trips unchanged.
        entry = LogEntry()
        entry.date = datetime.date.today()
        entry.save()
        entry.reload()
        assert entry.date == datetime.date.today()

        # Microseconds — whether dropped or rounded down by BSON — never
        # affect the stored date.
        entry = LogEntry()
        for micro, rounded in ((999, 0), (9999, 9000)):
            stamp = datetime.datetime(1970, 1, 1, 0, 0, 1, micro)
            entry.date = stamp
            entry.save()
            entry.reload()
            assert entry.date == stamp.date()
            assert (
                entry.date == datetime.datetime(1970, 1, 1, 0, 0, 1, rounded).date()
            )

        if not six.PY3:
            # Pre UTC dates microseconds below 1000 are dropped
            # This does not seem to be true in PY3
            stamp = datetime.datetime(1969, 12, 31, 23, 59, 59, 999)
            entry.date = stamp
            entry.save()
            entry.reload()
            assert entry.date == stamp.date()
            assert entry.date == datetime.datetime(1969, 12, 31, 23, 59, 59).date()

    def test_regular_usage(self):
        """Tests for regular datetime fields"""

        class LogEntry(Document):
            date = DateField()

        LogEntry.drop_collection()

        first_date = datetime.datetime(1970, 1, 1, 0, 0, 1)
        entry = LogEntry()
        entry.date = first_date
        entry.validate()
        entry.save()

        # The field is queryable by datetime or by an ISO-ish string.
        for query in (first_date, first_date.isoformat(" ")):
            assert LogEntry.objects.get(date=query) == entry

        if dateutil:
            # "T"-separated ISO strings only parse when dateutil is installed.
            assert LogEntry.objects.get(date=first_date.isoformat("T")) == entry

        # create additional 19 log entries for a total of 20
        for year in range(1971, 1990):
            LogEntry(date=datetime.datetime(year, 1, 1, 0, 0, 1)).save()

        assert LogEntry.objects.count() == 20

        # Ascending and descending ordering both respect the stored date.
        ascending = list(LogEntry.objects.order_by("date"))
        for earlier, later in zip(ascending, ascending[1:]):
            assert earlier.date <= later.date

        descending = list(LogEntry.objects.order_by("-date"))
        for later, earlier in zip(descending, descending[1:]):
            assert later.date >= earlier.date

        # Test searching
        matches = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1))
        assert matches.count() == 10

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to datetime
        fields.
        """

        class LogEntry(Document):
            time = DateField()

        log = LogEntry()

        # Datetimes, dates, and parsable strings all validate.
        for valid in (
            datetime.datetime.now(),
            datetime.date.today(),
            datetime.datetime.now().isoformat(" "),
        ):
            log.time = valid
            log.validate()

        if dateutil:
            log.time = datetime.datetime.now().isoformat("T")
            log.validate()

        # Non-date values are rejected.
        for invalid in (-1, "ABC"):
            log.time = invalid
            with pytest.raises(ValidationError):
                log.validate()
| @@ -1,246 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import datetime as dt | ||||
|  | ||||
| import pytest | ||||
| import six | ||||
|  | ||||
| try: | ||||
|     import dateutil | ||||
| except ImportError: | ||||
|     dateutil = None | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine import connection | ||||
|  | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class TestDateTimeField(MongoDBTestCase):
    """Tests for DateTimeField parsing, BSON precision loss, and validation."""

    def test_datetime_from_empty_string(self):
        """
        Ensure an exception is raised when trying to
        cast an empty string to datetime.
        """

        class MyDoc(Document):
            dt = DateTimeField()

        md = MyDoc(dt="")
        with pytest.raises(ValidationError):
            md.save()

    def test_datetime_from_whitespace_string(self):
        """
        Ensure an exception is raised when trying to
        cast a whitespace-only string to datetime.
        """

        class MyDoc(Document):
            dt = DateTimeField()

        md = MyDoc(dt="   ")
        with pytest.raises(ValidationError):
            md.save()

    def test_default_value_utcnow(self):
        """Ensure that default field values are used when creating
        a document.
        """

        class Person(Document):
            created = DateTimeField(default=dt.datetime.utcnow)

        utcnow = dt.datetime.utcnow()
        person = Person()
        person.validate()
        person_created_t0 = person.created
        # The callable default must have been evaluated at creation time,
        # i.e. within a second of the reference timestamp taken above.
        assert person.created - utcnow < dt.timedelta(seconds=1)
        assert person_created_t0 == person.created  # make sure it does not change
        assert person._data["created"] == person.created

    def test_handling_microseconds(self):
        """Tests showing pymongo datetime fields handling of microseconds.
        Microseconds are rounded to the nearest millisecond and pre UTC
        handling is wonky.

        See: http://api.mongodb.org/python/current/api/bson/son.html#dt
        """

        class LogEntry(Document):
            date = DateTimeField()

        LogEntry.drop_collection()

        # Test can save dates
        log = LogEntry()
        log.date = dt.date.today()
        log.save()
        log.reload()
        assert log.date.date() == dt.date.today()

        # Post UTC - microseconds are rounded (down) nearest millisecond and
        # dropped
        d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 999)
        d2 = dt.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.save()
        log.reload()
        # Sub-millisecond precision is lost on the round trip...
        assert log.date != d1
        # ...and the value comes back truncated to the millisecond.
        assert log.date == d2

        # Post UTC - microseconds are rounded (down) nearest millisecond
        d1 = dt.datetime(1970, 1, 1, 0, 0, 1, 9999)
        d2 = dt.datetime(1970, 1, 1, 0, 0, 1, 9000)
        log.date = d1
        log.save()
        log.reload()
        assert log.date != d1
        assert log.date == d2

        if not six.PY3:
            # Pre UTC dates microseconds below 1000 are dropped
            # This does not seem to be true in PY3
            d1 = dt.datetime(1969, 12, 31, 23, 59, 59, 999)
            d2 = dt.datetime(1969, 12, 31, 23, 59, 59)
            log.date = d1
            log.save()
            log.reload()
            assert log.date != d1
            assert log.date == d2

    def test_regular_usage(self):
        """Tests for regular datetime fields"""

        class LogEntry(Document):
            date = DateTimeField()

        LogEntry.drop_collection()

        d1 = dt.datetime(1970, 1, 1, 0, 0, 1)
        log = LogEntry()
        log.date = d1
        log.validate()
        log.save()

        # Queryable by datetime or by its space-separated ISO string.
        for query in (d1, d1.isoformat(" ")):
            log1 = LogEntry.objects.get(date=query)
            assert log == log1

        if dateutil:
            # "T"-separated ISO strings only parse when dateutil is installed.
            log1 = LogEntry.objects.get(date=d1.isoformat("T"))
            assert log == log1

        # create additional 19 log entries for a total of 20
        for i in range(1971, 1990):
            d = dt.datetime(i, 1, 1, 0, 0, 1)
            LogEntry(date=d).save()

        assert LogEntry.objects.count() == 20

        # Test ordering
        logs = LogEntry.objects.order_by("date")
        i = 0
        while i < 19:
            assert logs[i].date <= logs[i + 1].date
            i += 1

        logs = LogEntry.objects.order_by("-date")
        i = 0
        while i < 19:
            assert logs[i].date >= logs[i + 1].date
            i += 1

        # Test searching
        logs = LogEntry.objects.filter(date__gte=dt.datetime(1980, 1, 1))
        assert logs.count() == 10

        logs = LogEntry.objects.filter(date__lte=dt.datetime(1980, 1, 1))
        assert logs.count() == 10

        logs = LogEntry.objects.filter(
            date__lte=dt.datetime(1980, 1, 1), date__gte=dt.datetime(1975, 1, 1)
        )
        assert logs.count() == 5

    def test_datetime_validation(self):
        """Ensure that invalid values cannot be assigned to datetime
        fields.
        """

        class LogEntry(Document):
            time = DateTimeField()

        log = LogEntry()
        log.time = dt.datetime.now()
        log.validate()

        # A plain date is also acceptable.
        log.time = dt.date.today()
        log.validate()

        log.time = dt.datetime.now().isoformat(" ")
        log.validate()

        # Microsecond-bearing string parses too.
        log.time = "2019-05-16 21:42:57.897847"
        log.validate()

        if dateutil:
            log.time = dt.datetime.now().isoformat("T")
            log.validate()

        # Non-date values and malformed strings must all be rejected.
        log.time = -1
        with pytest.raises(ValidationError):
            log.validate()
        log.time = "ABC"
        with pytest.raises(ValidationError):
            log.validate()
        log.time = "2019-05-16 21:GARBAGE:12"
        with pytest.raises(ValidationError):
            log.validate()
        log.time = "2019-05-16 21:42:57.GARBAGE"
        with pytest.raises(ValidationError):
            log.validate()
        log.time = "2019-05-16 21:42:57.123.456"
        with pytest.raises(ValidationError):
            log.validate()

    def test_parse_datetime_as_str(self):
        """A parsable datetime string is stored as-is on assignment and
        converted to a real datetime on the database round trip.
        """

        class DTDoc(Document):
            date = DateTimeField()

        date_str = "2019-03-02 22:26:01"

        # make sure that passing a parsable datetime works
        dtd = DTDoc()
        dtd.date = date_str
        # Before saving, the raw string is kept unchanged.
        assert isinstance(dtd.date, six.string_types)
        dtd.save()
        dtd.reload()

        assert isinstance(dtd.date, dt.datetime)
        assert str(dtd.date) == date_str

        # An unparsable string fails validation.
        dtd.date = "January 1st, 9999999999"
        with pytest.raises(ValidationError):
            dtd.validate()
|  | ||||
|  | ||||
class TestDateTimeTzAware(MongoDBTestCase):
    def test_datetime_tz_aware_mark_as_changed(self):
        """Assigning a naive datetime equal to the tz-aware stored value
        must still mark the field as changed.
        """
        # Reset the cached connections so tz_aware=True takes effect.
        connection._connection_settings = {}
        connection._connections = {}
        connection._dbs = {}

        connect(db="mongoenginetest", tz_aware=True)

        class LogEntry(Document):
            time = DateTimeField()

        LogEntry.drop_collection()

        naive_time = dt.datetime(2013, 1, 1, 0, 0, 0)
        LogEntry(time=naive_time).save()

        entry = LogEntry.objects.first()
        entry.time = naive_time
        assert entry._changed_fields == ["time"]
| @@ -1,110 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| from decimal import Decimal | ||||
|  | ||||
| import pytest | ||||
|  | ||||
| from mongoengine import * | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class TestDecimalField(MongoDBTestCase):
    def test_validation(self):
        """Invalid or out-of-range values must fail DecimalField validation."""

        class Person(Document):
            height = DecimalField(min_value=Decimal("0.1"), max_value=Decimal("3.5"))

        Person.drop_collection()

        Person(height=Decimal("1.89")).save()
        person = Person.objects.first()
        assert person.height == Decimal("1.89")

        # A numeric string inside the range is accepted.
        person.height = "2.0"
        person.save()

        # Below min, above max, or not convertible at all — each must raise.
        for bad_height in (0.01, Decimal("0.01"), Decimal("4.0"), "something invalid"):
            person.height = bad_height
            with pytest.raises(ValidationError):
                person.validate()

        with pytest.raises(ValidationError):
            Person(height="something invalid").validate()

    def test_comparison(self):
        """gt/gte queries coerce Decimal, int, and str operands alike."""

        class Person(Document):
            money = DecimalField()

        Person.drop_collection()

        for amount in (6, 7, 8, 10):
            Person(money=amount).save()

        assert Person.objects(money__gt=Decimal("7")).count() == 2
        assert Person.objects(money__gt=7).count() == 2
        assert Person.objects(money__gt="7").count() == 2
        assert Person.objects(money__gte="7").count() == 3

    def test_storage(self):
        """Verify raw storage and local representation with precision=4."""

        class Person(Document):
            float_value = DecimalField(precision=4)
            string_value = DecimalField(precision=4, force_string=True)

        Person.drop_collection()
        inputs = [
            10,
            10.1,
            10.11,
            "10.111",
            Decimal("10.1111"),
            Decimal("10.11111"),
        ]
        # Store each value twice: once via __init__ kwargs (to_python is
        # called explicitly there) and once via attribute assignment.
        for via_constructor in (True, False):
            for value in inputs:
                if via_constructor:
                    Person(float_value=value, string_value=value).save()
                else:
                    doc = Person.objects.create()
                    doc.float_value = value
                    doc.string_value = value
                    doc.save()

        # Raw representation as stored in MongoDB (both passes identical).
        raw_expected = [
            {"float_value": 10.0, "string_value": "10.0000"},
            {"float_value": 10.1, "string_value": "10.1000"},
            {"float_value": 10.11, "string_value": "10.1100"},
            {"float_value": 10.111, "string_value": "10.1110"},
            {"float_value": 10.1111, "string_value": "10.1111"},
            {"float_value": 10.1111, "string_value": "10.1111"},
        ] * 2
        assert list(Person.objects.exclude("id").as_pymongo()) == raw_expected

        # Values as surfaced through the Python layer.
        local_expected = [
            Decimal("10.0000"),
            Decimal("10.1000"),
            Decimal("10.1100"),
            Decimal("10.1110"),
            Decimal("10.1111"),
            Decimal("10.1111"),
        ] * 2
        for field_name in ("float_value", "string_value"):
            assert list(Person.objects().scalar(field_name)) == local_expected
| @@ -1,356 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import pytest | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.base import BaseDict | ||||
| from mongoengine.mongodb_support import MONGODB_36, get_mongodb_version | ||||
|  | ||||
| from tests.utils import MongoDBTestCase, get_as_pymongo | ||||
|  | ||||
|  | ||||
class TestDictField(MongoDBTestCase):
    def test_storage(self):
        """A plain dict value is stored verbatim."""

        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        payload = {"testkey": "testvalue"}
        post = BlogPost(info=payload).save()
        assert get_as_pymongo(post) == {"_id": post.id, "info": payload}

    def test_general_things(self):
        """Exercise validation, querying, and mutation of DictField values."""

        class BlogPost(Document):
            info = DictField()

        BlogPost.drop_collection()

        post = BlogPost()
        # These values are invalid regardless of the server version:
        # non-dicts, $-prefixed keys (top-level or nested), non-string keys.
        for bad_value in (
            "my post",
            ["test", "test"],
            {"$title": "test"},
            {"nested": {"$title": "test"}},
            {"$title.test": "test"},
            {1: "test"},
        ):
            post.info = bad_value
            with pytest.raises(ValidationError):
                post.validate()

        # Dotted keys (and mixed $/.) are rejected only before MongoDB 3.6.
        for edge_value in (
            {"nested": {"the.title": "test"}},
            {"dollar_and_dot": {"te$st.test": "test"}},
        ):
            post.info = edge_value
            if get_mongodb_version() < MONGODB_36:
                with pytest.raises(ValidationError):
                    post.validate()
            else:
                post.validate()

        post.info = {"title": "test"}
        post.save()

        post = BlogPost()
        post.info = {"title": "dollar_sign", "details": {"te$t": "test"}}
        post.save()

        post = BlogPost()
        post.info = {"details": {"test": "test"}}
        post.save()

        post = BlogPost()
        post.info = {"details": {"test": 3}}
        post.save()

        assert BlogPost.objects.count() == 4
        assert BlogPost.objects.filter(info__title__exact="test").count() == 1
        assert BlogPost.objects.filter(info__details__test__exact="test").count() == 1

        post = BlogPost.objects.filter(info__title__exact="dollar_sign").first()
        assert "te$t" in post["info"]["details"]

        # Non-matching value types and missing keys simply match nothing.
        assert BlogPost.objects.filter(info__details__test__exact=5).count() == 0
        assert BlogPost.objects.filter(info__made_up__test__exact="test").count() == 0

        post = BlogPost.objects.create(info={"title": "original"})
        post.info.update({"title": "updated"})
        post.save()
        post.reload()
        assert post.info["title"] == "updated"

        post.info.setdefault("authors", [])
        post.save()
        post.reload()
        assert post.info["authors"] == []

    def test_dictfield_dump_document(self):
        """A DictField can hold another document's to_mongo() dump."""

        class Doc(Document):
            field = DictField()

        class ToEmbed(Document):
            id = IntField(primary_key=True, default=1)
            recursive = DictField()

        class ToEmbedParent(Document):
            id = IntField(primary_key=True, default=1)
            recursive = DictField()

            meta = {"allow_inheritance": True}

        class ToEmbedChild(ToEmbedParent):
            pass

        inner = ToEmbed(id=1).save()
        outer = ToEmbed(id=2, recursive=inner.to_mongo().to_dict()).save()
        doc = Doc(field=outer.to_mongo().to_dict())
        doc.save()
        assert isinstance(doc.field, dict)
        assert doc.field == {"_id": 2, "recursive": {"_id": 1, "recursive": {}}}

        # Same round-trip with a document carrying a _cls marker.
        inner = ToEmbedChild(id=1).save()
        outer_child = ToEmbedChild(id=2, recursive=inner.to_mongo().to_dict()).save()
        doc = Doc(field=outer_child.to_mongo().to_dict())
        doc.save()
        assert isinstance(doc.field, dict)
        expected = {
            "_id": 2,
            "_cls": "ToEmbedParent.ToEmbedChild",
            "recursive": {
                "_id": 1,
                "_cls": "ToEmbedParent.ToEmbedChild",
                "recursive": {},
            },
        }
        assert doc.field == expected

    def test_dictfield_strict(self):
        """A DictField with a strict value field validates each value."""

        class Simple(Document):
            mapping = DictField(field=IntField())

        Simple.drop_collection()

        doc = Simple()
        doc.mapping["someint"] = 1
        doc.save()

        # A non-int value must be rejected at save time.
        with pytest.raises(ValidationError):
            doc.mapping["somestring"] = "abc"
            doc.save()

    def test_dictfield_complex(self):
        """DictField handles embedded documents and nested containers."""

        class SettingBase(EmbeddedDocument):
            meta = {"allow_inheritance": True}

        class StringSetting(SettingBase):
            value = StringField()

        class IntegerSetting(SettingBase):
            value = IntField()

        class Simple(Document):
            mapping = DictField()

        Simple.drop_collection()

        doc = Simple()
        doc.mapping["somestring"] = StringSetting(value="foo")
        doc.mapping["someint"] = IntegerSetting(value=42)
        doc.mapping["nested_dict"] = {
            "number": 1,
            "string": "Hi!",
            "float": 1.001,
            "complex": IntegerSetting(value=42),
            "list": [IntegerSetting(value=42), StringSetting(value="foo")],
        }
        doc.save()

        fetched = Simple.objects.get(id=doc.id)
        assert isinstance(fetched.mapping["somestring"], StringSetting)
        assert isinstance(fetched.mapping["someint"], IntegerSetting)

        # Querying into nested structures.
        assert Simple.objects.filter(mapping__someint__value=42).count() == 1
        assert Simple.objects.filter(mapping__nested_dict__number=1).count() == 1
        assert (
            Simple.objects.filter(mapping__nested_dict__complex__value=42).count() == 1
        )
        assert (
            Simple.objects.filter(mapping__nested_dict__list__0__value=42).count() == 1
        )
        assert (
            Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count()
            == 1
        )

        # Updating the whole mapping, and one nested list slot.
        Simple.objects().update(set__mapping={"someint": IntegerSetting(value=10)})
        Simple.objects().update(
            set__mapping__nested_dict__list__1=StringSetting(value="Boo")
        )
        assert (
            Simple.objects.filter(mapping__nested_dict__list__1__value="foo").count()
            == 0
        )
        assert (
            Simple.objects.filter(mapping__nested_dict__list__1__value="Boo").count()
            == 1
        )

    def test_push_dict(self):
        class MyModel(Document):
            events = ListField(DictField())

        doc = MyModel(events=[{"a": 1}]).save()
        assert get_as_pymongo(doc) == {"_id": doc.id, "events": [{"a": 1}]}

        # Pushing an empty dict appends it as-is.
        MyModel.objects(id=doc.id).update(push__events={})
        assert get_as_pymongo(doc) == {"_id": doc.id, "events": [{"a": 1}, {}]}

    def test_ensure_unique_default_instances(self):
        """Each instance must get its own default dict, never a shared one."""

        class D(Document):
            data = DictField()
            data2 = DictField(default=lambda: {})

        first = D()
        first.data["foo"] = "bar"
        first.data2["foo"] = "bar"
        second = D()
        assert second.data == {}
        assert second.data2 == {}

    def test_dict_field_invalid_dict_value(self):
        class DictFieldTest(Document):
            dictionary = DictField(required=True)

        DictFieldTest.drop_collection()

        # None and False are both invalid for a required DictField; the
        # getter itself must not blow up before validation runs.
        for bad_value in (None, False):
            test = DictFieldTest(dictionary=bad_value)
            test.dictionary  # Just access to test getter
            with pytest.raises(ValidationError):
                test.validate()

    def test_dict_field_raises_validation_error_if_wrongly_assign_embedded_doc(self):
        class DictFieldTest(Document):
            dictionary = DictField(required=True)

        DictFieldTest.drop_collection()

        class Embedded(EmbeddedDocument):
            name = StringField()

        doc = DictFieldTest(dictionary=Embedded(name="garbage"))
        with pytest.raises(ValidationError) as exc_info:
            doc.validate()

        message = str(exc_info.value)
        assert "'dictionary'" in message
        assert "Only dictionaries may be used in a DictField" in message

    def test_atomic_update_dict_field(self):
        """The entire DictField can be replaced atomically via set__."""

        class Simple(Document):
            mapping = DictField(field=ListField(IntField(required=True)))

        Simple.drop_collection()

        doc = Simple()
        doc.mapping["someints"] = [1, 2]
        doc.save()
        doc.update(set__mapping={"ints": [3, 4]})
        doc.reload()
        assert isinstance(doc.mapping, BaseDict)
        assert doc.mapping == {"ints": [3, 4]}

        # A mapping violating the strict field type is rejected.
        with pytest.raises(ValueError):
            doc.update(set__mapping={"somestrings": ["foo", "bar"]})

    def test_dictfield_with_referencefield_complex_nesting_cases(self):
        """Ensure complex nesting inside DictField handles dereferencing of ReferenceField(dbref=True | False)"""
        # Relates to Issue #1453
        class Doc(Document):
            s = StringField()

        class Simple(Document):
            mapping0 = DictField(ReferenceField(Doc, dbref=True))
            mapping1 = DictField(ReferenceField(Doc, dbref=False))
            mapping2 = DictField(ListField(ReferenceField(Doc, dbref=True)))
            mapping3 = DictField(ListField(ReferenceField(Doc, dbref=False)))
            mapping4 = DictField(DictField(field=ReferenceField(Doc, dbref=True)))
            mapping5 = DictField(DictField(field=ReferenceField(Doc, dbref=False)))
            mapping6 = DictField(ListField(DictField(ReferenceField(Doc, dbref=True))))
            mapping7 = DictField(ListField(DictField(ReferenceField(Doc, dbref=False))))
            mapping8 = DictField(
                ListField(DictField(ListField(ReferenceField(Doc, dbref=True))))
            )
            mapping9 = DictField(
                ListField(DictField(ListField(ReferenceField(Doc, dbref=False))))
            )

        Doc.drop_collection()
        Simple.drop_collection()

        ref = Doc(s="aa").save()
        doc = Simple()
        # Populate every nesting shape with the same referenced document.
        doc.mapping0["someint"] = doc.mapping1["someint"] = ref
        doc.mapping2["someint"] = doc.mapping3["someint"] = [ref]
        doc.mapping4["someint"] = doc.mapping5["someint"] = {"d": ref}
        doc.mapping6["someint"] = doc.mapping7["someint"] = [{"d": ref}]
        doc.mapping8["someint"] = doc.mapping9["someint"] = [{"d": [ref]}]
        doc.save()

        # Every shape must dereference back to a Doc instance.
        fetched = Simple.objects.first()
        assert isinstance(fetched.mapping0["someint"], Doc)
        assert isinstance(fetched.mapping1["someint"], Doc)
        assert isinstance(fetched.mapping2["someint"][0], Doc)
        assert isinstance(fetched.mapping3["someint"][0], Doc)
        assert isinstance(fetched.mapping4["someint"]["d"], Doc)
        assert isinstance(fetched.mapping5["someint"]["d"], Doc)
        assert isinstance(fetched.mapping6["someint"][0]["d"], Doc)
        assert isinstance(fetched.mapping7["someint"][0]["d"], Doc)
        assert isinstance(fetched.mapping8["someint"][0]["d"][0], Doc)
        assert isinstance(fetched.mapping9["someint"][0]["d"][0], Doc)
| @@ -1,137 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import sys | ||||
|  | ||||
| import pytest | ||||
|  | ||||
| from mongoengine import * | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class TestEmailField(MongoDBTestCase):
    def test_generic_behavior(self):
        class User(Document):
            email = EmailField()

        # Each of these addresses must validate with the default settings.
        for valid_address in (
            "ross@example.com",
            "ross@example.co.uk",
            "Kofq@rhom0e4klgauOhpbpNdogawnyIKvQS0wk2mjqrgGQ5SaJIazqqWkm7.net",
            "new-tld@example.technology",
            u"user@пример.рф",  # unicode domain
        ):
            User(email=valid_address).validate()

        # Trailing dot, bare unicode label, and a non-string must all fail.
        for invalid_address in ("ross@example.com.", u"user@пример", 123):
            with pytest.raises(ValidationError):
                User(email=invalid_address).validate()

    def test_email_field_unicode_user(self):
        class User(Document):
            email = EmailField()

        address = u"Dörte@Sörensen.example.com"

        # A unicode local part is rejected by default...
        with pytest.raises(ValidationError):
            User(email=address).validate()

        # ...but accepted once allow_utf8_user is enabled.
        class User(Document):
            email = EmailField(allow_utf8_user=True)

        User(email=address).validate()

    def test_email_field_domain_whitelist(self):
        class User(Document):
            email = EmailField()

        # The localhost domain is rejected by default...
        with pytest.raises(ValidationError):
            User(email="me@localhost").validate()

        # ...but accepted once whitelisted.
        class User(Document):
            email = EmailField(domain_whitelist=["localhost"])

        User(email="me@localhost").validate()

    def test_email_domain_validation_fails_if_invalid_idn(self):
        class User(Document):
            email = EmailField()

        # A leading dot makes the domain fail IDN encoding.
        user = User(email="me@.google.com")

        with pytest.raises(ValidationError) as exc_info:
            user.validate()
        assert "domain failed IDN encoding" in str(exc_info.value)

    def test_email_field_ip_domain(self):
        class User(Document):
            email = EmailField()

        good_ipv4 = "email@[127.0.0.1]"
        good_ipv6 = "email@[2001:dB8::1]"
        bad_ip = "email@[324.0.0.1]"

        # IP-literal domains are rejected unless explicitly allowed.
        for address in (good_ipv4, good_ipv6, bad_ip):
            with pytest.raises(ValidationError):
                User(email=address).validate()

        # With allow_ip_domain, well-formed IPs pass...
        class User(Document):
            email = EmailField(allow_ip_domain=True)

        User(email=good_ipv4).validate()
        User(email=good_ipv6).validate()

        # ...but a malformed IP still fails.
        with pytest.raises(ValidationError):
            User(email=bad_ip).validate()

    def test_email_field_honors_regex(self):
        class User(Document):
            email = EmailField(regex=r"\w+@example.com")

        # An address outside the custom pattern fails...
        with pytest.raises(ValidationError):
            User(email="me@foo.com").validate()

        # ...while a matching one passes (validate returns None on success).
        assert User(email="me@example.com").validate() is None
| @@ -1,354 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import pytest | ||||
|  | ||||
| from mongoengine import ( | ||||
|     Document, | ||||
|     EmbeddedDocument, | ||||
|     EmbeddedDocumentField, | ||||
|     GenericEmbeddedDocumentField, | ||||
|     IntField, | ||||
|     InvalidQueryError, | ||||
|     ListField, | ||||
|     LookUpError, | ||||
|     StringField, | ||||
|     ValidationError, | ||||
| ) | ||||
|  | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class TestEmbeddedDocumentField(MongoDBTestCase):
    def test___init___(self):
        """The constructor accepts either a class or a lazy class-name string."""

        class MyDoc(EmbeddedDocument):
            name = StringField()

        field = EmbeddedDocumentField(MyDoc)
        assert field.document_type_obj == MyDoc

        field2 = EmbeddedDocumentField("MyDoc")
        assert field2.document_type_obj == "MyDoc"

    def test___init___throw_error_if_document_type_is_not_EmbeddedDocument(self):
        with pytest.raises(ValidationError):
            EmbeddedDocumentField(dict)

    def test_document_type_throw_error_if_not_EmbeddedDocument_subclass(self):
        # A lazy name resolving to a non-EmbeddedDocument must fail on access.
        class MyDoc(Document):
            name = StringField()

        emb = EmbeddedDocumentField("MyDoc")
        with pytest.raises(ValidationError) as exc_info:
            emb.document_type
        assert (
            "Invalid embedded document class provided to an EmbeddedDocumentField"
            in str(exc_info.value)
        )

    def test_embedded_document_field_only_allow_subclasses_of_embedded_document(self):
        # Relates to #1661
        class MyDoc(Document):
            name = StringField()

        with pytest.raises(ValidationError):

            class MyFailingDoc(Document):
                emb = EmbeddedDocumentField(MyDoc)

        with pytest.raises(ValidationError):

            class MyFailingdoc2(Document):
                emb = EmbeddedDocumentField("MyDoc")

    def test_query_embedded_document_attribute(self):
        class AdminSettings(EmbeddedDocument):
            foo1 = StringField()
            foo2 = StringField()

        class Person(Document):
            settings = EmbeddedDocumentField(AdminSettings)
            name = StringField()

        Person.drop_collection()

        p = Person(settings=AdminSettings(foo1="bar1", foo2="bar2"), name="John").save()

        # Test non exiting attribute
        with pytest.raises(InvalidQueryError) as exc_info:
            Person.objects(settings__notexist="bar").first()
        # Fixed: use str() instead of the Python-2-only unicode() builtin,
        # matching the str(exc_info.value) usage elsewhere in this suite.
        assert str(exc_info.value) == 'Cannot resolve field "notexist"'

        with pytest.raises(LookUpError):
            Person.objects.only("settings.notexist")

        # Test existing attribute
        assert Person.objects(settings__foo1="bar1").first().id == p.id
        only_p = Person.objects.only("settings.foo1").first()
        assert only_p.settings.foo1 == p.settings.foo1
        assert only_p.settings.foo2 is None
        assert only_p.name is None

        exclude_p = Person.objects.exclude("settings.foo1").first()
        assert exclude_p.settings.foo1 is None
        assert exclude_p.settings.foo2 == p.settings.foo2
        assert exclude_p.name == p.name

    def test_query_embedded_document_attribute_with_inheritance(self):
        class BaseSettings(EmbeddedDocument):
            meta = {"allow_inheritance": True}
            base_foo = StringField()

        class AdminSettings(BaseSettings):
            sub_foo = StringField()

        class Person(Document):
            settings = EmbeddedDocumentField(BaseSettings)

        Person.drop_collection()

        p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo"))
        p.save()

        # Test non exiting attribute
        with pytest.raises(InvalidQueryError) as exc_info:
            assert Person.objects(settings__notexist="bar").first().id == p.id
        # Fixed: str() instead of Python-2-only unicode() (NameError on py3).
        assert str(exc_info.value) == 'Cannot resolve field "notexist"'

        # Test existing attribute
        assert Person.objects(settings__base_foo="basefoo").first().id == p.id
        assert Person.objects(settings__sub_foo="subfoo").first().id == p.id

        only_p = Person.objects.only("settings.base_foo", "settings._cls").first()
        assert only_p.settings.base_foo == "basefoo"
        assert only_p.settings.sub_foo is None

    def test_query_list_embedded_document_with_inheritance(self):
        class Post(EmbeddedDocument):
            title = StringField(max_length=120, required=True)
            meta = {"allow_inheritance": True}

        class TextPost(Post):
            content = StringField()

        class MoviePost(Post):
            author = StringField()

        class Record(Document):
            posts = ListField(EmbeddedDocumentField(Post))

        record_movie = Record(posts=[MoviePost(author="John", title="foo")]).save()
        record_text = Record(posts=[TextPost(content="a", title="foo")]).save()

        # Querying on a subclass-only field matches only that subclass.
        records = list(Record.objects(posts__author=record_movie.posts[0].author))
        assert len(records) == 1
        assert records[0].id == record_movie.id

        records = list(Record.objects(posts__content=record_text.posts[0].content))
        assert len(records) == 1
        assert records[0].id == record_text.id

        # Querying on the shared base field matches both.
        assert Record.objects(posts__title="foo").count() == 2
|  | ||||
|  | ||||
class TestGenericEmbeddedDocumentField(MongoDBTestCase):
    """Tests for GenericEmbeddedDocumentField: storage of arbitrary
    EmbeddedDocument types, `choices` restrictions, and attribute querying.
    """

    def test_generic_embedded_document(self):
        """A generic embedded field accepts any EmbeddedDocument subclass."""

        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            like = GenericEmbeddedDocumentField()

        Person.drop_collection()

        person = Person(name="Test User")
        person.like = Car(name="Fiat")
        person.save()

        person = Person.objects.first()
        assert isinstance(person.like, Car)

        # The same field can later hold a different embedded document type.
        person.like = Dish(food="arroz", number=15)
        person.save()

        person = Person.objects.first()
        assert isinstance(person.like, Dish)

    def test_generic_embedded_document_choices(self):
        """Ensure you can limit GenericEmbeddedDocument choices."""

        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            like = GenericEmbeddedDocumentField(choices=(Dish,))

        Person.drop_collection()

        person = Person(name="Test User")
        # Car is not among the declared choices, so validation must fail.
        person.like = Car(name="Fiat")
        with pytest.raises(ValidationError):
            person.validate()

        person.like = Dish(food="arroz", number=15)
        person.save()

        person = Person.objects.first()
        assert isinstance(person.like, Dish)

    def test_generic_list_embedded_document_choices(self):
        """Ensure you can limit GenericEmbeddedDocument choices inside
        a list field.
        """

        class Car(EmbeddedDocument):
            name = StringField()

        class Dish(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            likes = ListField(GenericEmbeddedDocumentField(choices=(Dish,)))

        Person.drop_collection()

        person = Person(name="Test User")
        # A list element outside the declared choices fails validation.
        person.likes = [Car(name="Fiat")]
        with pytest.raises(ValidationError):
            person.validate()

        person.likes = [Dish(food="arroz", number=15)]
        person.save()

        person = Person.objects.first()
        assert isinstance(person.likes[0], Dish)

    def test_choices_validation_documents(self):
        """
        Ensure fields with document choices validate given a valid choice.
        """

        class UserComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class BlogPost(Document):
            comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,)))

        # Ensure Validation Passes
        BlogPost(comments=[UserComments(author="user2", message="message2")]).save()

    def test_choices_validation_documents_invalid(self):
        """
        Ensure fields with document choices validate given an invalid choice.
        This should throw a ValidationError exception.
        """

        class UserComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class ModeratorComments(EmbeddedDocument):
            author = StringField()
            message = StringField()

        class BlogPost(Document):
            comments = ListField(GenericEmbeddedDocumentField(choices=(UserComments,)))

        # Single Entry Failure
        post = BlogPost(comments=[ModeratorComments(author="mod1", message="message1")])
        with pytest.raises(ValidationError):
            post.save()

        # Mixed Entry Failure: one valid element does not excuse an invalid one.
        post = BlogPost(
            comments=[
                ModeratorComments(author="mod1", message="message1"),
                UserComments(author="user2", message="message2"),
            ]
        )
        with pytest.raises(ValidationError):
            post.save()

    def test_choices_validation_documents_inheritance(self):
        """
        Ensure fields with document choices validate given subclass of choice.
        """

        class Comments(EmbeddedDocument):
            meta = {"abstract": True}
            author = StringField()
            message = StringField()

        class UserComments(Comments):
            pass

        class BlogPost(Document):
            comments = ListField(GenericEmbeddedDocumentField(choices=(Comments,)))

        # Save Valid EmbeddedDocument Type (subclass of the declared choice)
        BlogPost(comments=[UserComments(author="user2", message="message2")]).save()

    def test_query_generic_embedded_document_attribute(self):
        """Attributes of the stored embedded document are queryable by name."""

        class AdminSettings(EmbeddedDocument):
            foo1 = StringField()

        class NonAdminSettings(EmbeddedDocument):
            foo2 = StringField()

        class Person(Document):
            settings = GenericEmbeddedDocumentField(
                choices=(AdminSettings, NonAdminSettings)
            )

        Person.drop_collection()

        p1 = Person(settings=AdminSettings(foo1="bar1")).save()
        p2 = Person(settings=NonAdminSettings(foo2="bar2")).save()

        # Test non existing attribute
        with pytest.raises(InvalidQueryError) as exc_info:
            Person.objects(settings__notexist="bar").first()
        # NOTE: was `unicode(...)`/`u''` (Python 2 only); `str` works on both.
        assert str(exc_info.value) == 'Cannot resolve field "notexist"'

        with pytest.raises(LookUpError):
            Person.objects.only("settings.notexist")

        # Test existing attribute
        assert Person.objects(settings__foo1="bar1").first().id == p1.id
        assert Person.objects(settings__foo2="bar2").first().id == p2.id

    def test_query_generic_embedded_document_attribute_with_inheritance(self):
        """Inherited attributes of the embedded document are queryable too."""

        class BaseSettings(EmbeddedDocument):
            meta = {"allow_inheritance": True}
            base_foo = StringField()

        class AdminSettings(BaseSettings):
            sub_foo = StringField()

        class Person(Document):
            settings = GenericEmbeddedDocumentField(choices=[BaseSettings])

        Person.drop_collection()

        p = Person(settings=AdminSettings(base_foo="basefoo", sub_foo="subfoo"))
        p.save()

        # Test non existing attribute
        with pytest.raises(InvalidQueryError) as exc_info:
            assert Person.objects(settings__notexist="bar").first().id == p.id
        # NOTE: was `unicode(...)`/`u''` (Python 2 only); `str` works on both.
        assert str(exc_info.value) == 'Cannot resolve field "notexist"'

        # Test existing attribute (both base and subclass fields resolve)
        assert Person.objects(settings__base_foo="basefoo").first().id == p.id
        assert Person.objects(settings__sub_foo="subfoo").first().id == p.id
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,64 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import pytest | ||||
| import six | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class TestFloatField(MongoDBTestCase):
    """Tests for FloatField querying and validation."""

    def test_float_ne_operator(self):
        """A __ne query on a FloatField must distinguish None from numbers."""

        class TestDocument(Document):
            float_fld = FloatField()

        TestDocument.drop_collection()

        for stored in (None, 1):
            TestDocument(float_fld=stored).save()

        assert TestDocument.objects(float_fld__ne=None).count() == 1
        assert TestDocument.objects(float_fld__ne=1).count() == 1

    def test_validation(self):
        """Ensure that invalid values cannot be assigned to float fields.
        """

        class Person(Document):
            height = FloatField(min_value=0.1, max_value=3.5)

        class BigPerson(Document):
            height = FloatField()

        person = Person()
        person.height = 1.89
        person.validate()

        # A numeric-looking string, a value below min_value, and a value
        # above max_value must all be rejected.
        for rejected in ("2.0", 0.01, 4.0):
            person.height = rejected
            with pytest.raises(ValidationError):
                person.validate()

        person_2 = Person(height="something invalid")
        with pytest.raises(ValidationError):
            person_2.validate()

        big_person = BigPerson()

        # Every integer type convertible to float is accepted.
        for value, value_type in enumerate(six.integer_types):
            big_person.height = value_type(value)
            big_person.validate()

        big_person.height = 2 ** 500
        big_person.validate()

        big_person.height = 2 ** 100000  # Too big for a float value
        with pytest.raises(ValidationError):
            big_person.validate()
| @@ -1,47 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import pytest | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class TestIntField(MongoDBTestCase):
    """Tests for IntField validation and querying."""

    def test_int_validation(self):
        """Ensure that invalid values cannot be assigned to int fields.
        """

        class Person(Document):
            age = IntField(min_value=0, max_value=110)

        person = Person()

        # Values on and within the declared bounds validate fine.
        for accepted in (0, 50, 110):
            person.age = accepted
            person.validate()

        # Out-of-range and non-numeric values are rejected.
        for rejected in (-1, 120, "ten"):
            person.age = rejected
            with pytest.raises(ValidationError):
                person.validate()

    def test_ne_operator(self):
        """A __ne query on an IntField must distinguish None from numbers."""

        class TestDocument(Document):
            int_fld = IntField()

        TestDocument.drop_collection()

        for stored in (None, 1):
            TestDocument(int_fld=stored).save()

        assert TestDocument.objects(int_fld__ne=None).count() == 1
        assert TestDocument.objects(int_fld__ne=1).count() == 1
| @@ -1,576 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| from bson import DBRef, ObjectId | ||||
| import pytest | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.base import LazyReference | ||||
|  | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class TestLazyReferenceField(MongoDBTestCase):
    """Tests for LazyReferenceField: configuration, (de)serialization,
    fetch caching, query conversion, passthrough access, and equality.
    """

    def test_lazy_reference_config(self):
        # Make sure LazyReferenceField only accepts a document class or a
        # string with a document class name (EmbeddedDocument is rejected).
        with pytest.raises(ValidationError):
            LazyReferenceField(EmbeddedDocument)

    def test___repr__(self):
        """repr() of an unsaved reference still identifies it as a LazyReference."""

        class Animal(Document):
            pass

        class Ocurrence(Document):
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal()
        oc = Ocurrence(animal=animal)
        assert "LazyReference" in repr(oc.animal)

    def test___getattr___unknown_attr_raises_attribute_error(self):
        """Accessing a missing attribute on a LazyReference raises AttributeError."""

        class Animal(Document):
            pass

        class Ocurrence(Document):
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal().save()
        oc = Ocurrence(animal=animal)
        with pytest.raises(AttributeError):
            oc.animal.not_exist

    def test_lazy_reference_simple(self):
        """fetch() resolves the reference and caches the result unless forced."""

        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        p = Ocurrence.objects.get()
        assert isinstance(p.animal, LazyReference)
        fetched_animal = p.animal.fetch()
        assert fetched_animal == animal
        # `fetch` keeps a cache of the referenced document by default...
        animal.tag = "not so heavy"
        animal.save()
        double_fetch = p.animal.fetch()
        assert fetched_animal is double_fetch
        assert double_fetch.tag == "heavy"
        # ...unless specified otherwise
        fetch_force = p.animal.fetch(force=True)
        assert fetch_force is not fetched_animal
        assert fetch_force.tag == "not so heavy"

    def test_lazy_reference_fetch_invalid_ref(self):
        """Fetching a reference whose target was deleted raises DoesNotExist."""

        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        animal.delete()
        p = Ocurrence.objects.get()
        assert isinstance(p.animal, LazyReference)
        with pytest.raises(DoesNotExist):
            p.animal.fetch()

    def test_lazy_reference_set(self):
        """The field accepts documents, pks, DBRefs and LazyReferences,
        including instances of subclasses of the referenced document.
        """

        class Animal(Document):
            meta = {"allow_inheritance": True}

            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class SubAnimal(Animal):
            nick = StringField()

        animal = Animal(name="Leopard", tag="heavy").save()
        sub_animal = SubAnimal(nick="doggo", name="dog").save()
        for ref in (
            animal,
            animal.pk,
            DBRef(animal._get_collection_name(), animal.pk),
            LazyReference(Animal, animal.pk),
            sub_animal,
            sub_animal.pk,
            DBRef(sub_animal._get_collection_name(), sub_animal.pk),
            LazyReference(SubAnimal, sub_animal.pk),
        ):
            p = Ocurrence(person="test", animal=ref).save()
            p.reload()
            assert isinstance(p.animal, LazyReference)
            p.animal.fetch()

    def test_lazy_reference_bad_set(self):
        """Values that do not reference the declared document class fail validation."""

        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class BadDoc(Document):
            pass

        animal = Animal(name="Leopard", tag="heavy").save()
        baddoc = BadDoc().save()
        for bad in (
            42,
            "foo",
            baddoc,
            DBRef(baddoc._get_collection_name(), animal.pk),
            LazyReference(BadDoc, animal.pk),
        ):
            with pytest.raises(ValidationError):
                p = Ocurrence(person="test", animal=bad).save()

    def test_lazy_reference_query_conversion(self):
        """Ensure that LazyReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """

        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = LazyReferenceField(Member, dbref=False)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title="post 1", author=m1)
        post1.save()

        post2 = BlogPost(title="post 2", author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        assert post.id == post1.id

        post = BlogPost.objects(author=m2).first()
        assert post.id == post2.id

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        assert post.id == post2.id

    def test_lazy_reference_query_conversion_dbref(self):
        """Ensure that LazyReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.

        Same as test_lazy_reference_query_conversion but with dbref=True storage.
        """

        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = LazyReferenceField(Member, dbref=True)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title="post 1", author=m1)
        post1.save()

        post2 = BlogPost(title="post 2", author=m2)
        post2.save()

        post = BlogPost.objects(author=m1).first()
        assert post.id == post1.id

        post = BlogPost.objects(author=m2).first()
        assert post.id == post2.id

        # Same thing by passing a LazyReference instance
        post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first()
        assert post.id == post2.id

    def test_lazy_reference_passthrough(self):
        """With passthrough=True attribute/item access is proxied to the
        referenced document; with passthrough=False only `pk` is reachable.
        """

        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            animal = LazyReferenceField(Animal, passthrough=False)
            animal_passthrough = LazyReferenceField(Animal, passthrough=True)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(animal=animal, animal_passthrough=animal).save()
        p = Ocurrence.objects.get()
        assert isinstance(p.animal, LazyReference)
        with pytest.raises(KeyError):
            p.animal["name"]
        with pytest.raises(AttributeError):
            p.animal.name
        assert p.animal.pk == animal.pk

        assert p.animal_passthrough.name == "Leopard"
        assert p.animal_passthrough["name"] == "Leopard"

        # Should not be able to access referenced document's methods
        with pytest.raises(AttributeError):
            p.animal.save
        with pytest.raises(KeyError):
            p.animal["save"]

    def test_lazy_reference_not_set(self):
        """An unset LazyReferenceField reads back as None."""

        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        Ocurrence(person="foo").save()
        p = Ocurrence.objects.get()
        assert p.animal is None

    def test_lazy_reference_equality(self):
        """A LazyReference compares equal to its target document (both ways)."""

        class Animal(Document):
            name = StringField()
            tag = StringField()

        Animal.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        animalref = LazyReference(Animal, animal.pk)
        assert animal == animalref
        assert animalref == animal

        other_animalref = LazyReference(Animal, ObjectId("54495ad94c934721ede76f90"))
        assert animal != other_animalref
        assert other_animalref != animal

    def test_lazy_reference_embedded(self):
        """LazyReferenceFields nested in lists and embedded documents always
        deserialize (and re-assign from raw ids) as LazyReference instances.
        """

        class Animal(Document):
            name = StringField()
            tag = StringField()

        class EmbeddedOcurrence(EmbeddedDocument):
            in_list = ListField(LazyReferenceField(Animal))
            direct = LazyReferenceField(Animal)

        class Ocurrence(Document):
            in_list = ListField(LazyReferenceField(Animal))
            in_embedded = EmbeddedDocumentField(EmbeddedOcurrence)
            direct = LazyReferenceField(Animal)

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal1 = Animal(name="doggo").save()
        animal2 = Animal(name="cheeta").save()

        def check_fields_type(occ):
            # Every reference, wherever it is nested, must be a LazyReference.
            assert isinstance(occ.direct, LazyReference)
            for elem in occ.in_list:
                assert isinstance(elem, LazyReference)
            assert isinstance(occ.in_embedded.direct, LazyReference)
            for elem in occ.in_embedded.in_list:
                assert isinstance(elem, LazyReference)

        occ = Ocurrence(
            in_list=[animal1, animal2],
            in_embedded={"in_list": [animal1, animal2], "direct": animal1},
            direct=animal1,
        ).save()
        check_fields_type(occ)
        occ.reload()
        check_fields_type(occ)
        occ.direct = animal1.id
        occ.in_list = [animal1.id, animal2.id]
        occ.in_embedded.direct = animal1.id
        occ.in_embedded.in_list = [animal1.id, animal2.id]
        check_fields_type(occ)
|  | ||||
|  | ||||
| class TestGenericLazyReferenceField(MongoDBTestCase): | ||||
    def test_generic_lazy_reference_simple(self):
        """fetch() resolves a generic lazy reference and caches the result
        unless force=True is passed.
        """

        class Animal(Document):
            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard", tag="heavy").save()
        Ocurrence(person="test", animal=animal).save()
        p = Ocurrence.objects.get()
        assert isinstance(p.animal, LazyReference)
        fetched_animal = p.animal.fetch()
        assert fetched_animal == animal
        # `fetch` keeps a cache of the referenced document by default...
        animal.tag = "not so heavy"
        animal.save()
        double_fetch = p.animal.fetch()
        assert fetched_animal is double_fetch
        assert double_fetch.tag == "heavy"
        # ...unless specified otherwise
        fetch_force = p.animal.fetch(force=True)
        assert fetch_force is not fetched_animal
        assert fetch_force.tag == "not so heavy"
|  | ||||
    def test_generic_lazy_reference_choices(self):
        """`choices` restricts which document classes a generic lazy
        reference accepts, both on assignment and on save.
        """

        class Animal(Document):
            name = StringField()

        class Vegetal(Document):
            name = StringField()

        class Mineral(Document):
            name = StringField()

        class Ocurrence(Document):
            living_thing = GenericLazyReferenceField(choices=[Animal, Vegetal])
            thing = GenericLazyReferenceField()

        Animal.drop_collection()
        Vegetal.drop_collection()
        Mineral.drop_collection()
        Ocurrence.drop_collection()

        animal = Animal(name="Leopard").save()
        vegetal = Vegetal(name="Oak").save()
        mineral = Mineral(name="Granite").save()

        occ_animal = Ocurrence(living_thing=animal, thing=animal).save()
        occ_vegetal = Ocurrence(living_thing=vegetal, thing=vegetal).save()
        # Mineral is not in the choices of living_thing.
        with pytest.raises(ValidationError):
            Ocurrence(living_thing=mineral).save()

        occ = Ocurrence.objects.get(living_thing=animal)
        assert occ == occ_animal
        assert isinstance(occ.thing, LazyReference)
        assert isinstance(occ.living_thing, LazyReference)

        occ.thing = vegetal
        occ.living_thing = vegetal
        occ.save()

        # Unrestricted `thing` would accept mineral, but `living_thing`
        # rejects it, so the whole save fails.
        occ.thing = mineral
        occ.living_thing = mineral
        with pytest.raises(ValidationError):
            occ.save()
|  | ||||
    def test_generic_lazy_reference_set(self):
        """A generic lazy reference accepts documents, LazyReferences and raw
        {_cls, _ref} dicts, including subclasses of the referenced document.
        """

        class Animal(Document):
            meta = {"allow_inheritance": True}

            name = StringField()
            tag = StringField()

        class Ocurrence(Document):
            person = StringField()
            animal = GenericLazyReferenceField()

        Animal.drop_collection()
        Ocurrence.drop_collection()

        class SubAnimal(Animal):
            nick = StringField()

        animal = Animal(name="Leopard", tag="heavy").save()
        sub_animal = SubAnimal(nick="doggo", name="dog").save()
        for ref in (
            animal,
            LazyReference(Animal, animal.pk),
            {"_cls": "Animal", "_ref": DBRef(animal._get_collection_name(), animal.pk)},
            sub_animal,
            LazyReference(SubAnimal, sub_animal.pk),
            {
                "_cls": "SubAnimal",
                "_ref": DBRef(sub_animal._get_collection_name(), sub_animal.pk),
            },
        ):
            p = Ocurrence(person="test", animal=ref).save()
            p.reload()
            assert isinstance(p.animal, (LazyReference, Document))
            p.animal.fetch()
|  | ||||
|     def test_generic_lazy_reference_bad_set(self): | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             tag = StringField() | ||||
|  | ||||
|         class Ocurrence(Document): | ||||
|             person = StringField() | ||||
|             animal = GenericLazyReferenceField(choices=["Animal"]) | ||||
|  | ||||
|         Animal.drop_collection() | ||||
|         Ocurrence.drop_collection() | ||||
|  | ||||
|         class BadDoc(Document): | ||||
|             pass | ||||
|  | ||||
|         animal = Animal(name="Leopard", tag="heavy").save() | ||||
|         baddoc = BadDoc().save() | ||||
|         for bad in (42, "foo", baddoc, LazyReference(BadDoc, animal.pk)): | ||||
|             with pytest.raises(ValidationError): | ||||
|                 p = Ocurrence(person="test", animal=bad).save() | ||||
|  | ||||
|     def test_generic_lazy_reference_query_conversion(self): | ||||
|         class Member(Document): | ||||
|             user_num = IntField(primary_key=True) | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             title = StringField() | ||||
|             author = GenericLazyReferenceField() | ||||
|  | ||||
|         Member.drop_collection() | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         m1 = Member(user_num=1) | ||||
|         m1.save() | ||||
|         m2 = Member(user_num=2) | ||||
|         m2.save() | ||||
|  | ||||
|         post1 = BlogPost(title="post 1", author=m1) | ||||
|         post1.save() | ||||
|  | ||||
|         post2 = BlogPost(title="post 2", author=m2) | ||||
|         post2.save() | ||||
|  | ||||
|         post = BlogPost.objects(author=m1).first() | ||||
|         assert post.id == post1.id | ||||
|  | ||||
|         post = BlogPost.objects(author=m2).first() | ||||
|         assert post.id == post2.id | ||||
|  | ||||
|         # Same thing by passing a LazyReference instance | ||||
|         post = BlogPost.objects(author=LazyReference(Member, m2.pk)).first() | ||||
|         assert post.id == post2.id | ||||
|  | ||||
|     def test_generic_lazy_reference_not_set(self): | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             tag = StringField() | ||||
|  | ||||
|         class Ocurrence(Document): | ||||
|             person = StringField() | ||||
|             animal = GenericLazyReferenceField() | ||||
|  | ||||
|         Animal.drop_collection() | ||||
|         Ocurrence.drop_collection() | ||||
|  | ||||
|         Ocurrence(person="foo").save() | ||||
|         p = Ocurrence.objects.get() | ||||
|         assert p.animal is None | ||||
|  | ||||
|     def test_generic_lazy_reference_accepts_string_instead_of_class(self): | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             tag = StringField() | ||||
|  | ||||
|         class Ocurrence(Document): | ||||
|             person = StringField() | ||||
|             animal = GenericLazyReferenceField("Animal") | ||||
|  | ||||
|         Animal.drop_collection() | ||||
|         Ocurrence.drop_collection() | ||||
|  | ||||
|         animal = Animal().save() | ||||
|         Ocurrence(animal=animal).save() | ||||
|         p = Ocurrence.objects.get() | ||||
|         assert p.animal == animal | ||||
|  | ||||
|     def test_generic_lazy_reference_embedded(self): | ||||
|         class Animal(Document): | ||||
|             name = StringField() | ||||
|             tag = StringField() | ||||
|  | ||||
|         class EmbeddedOcurrence(EmbeddedDocument): | ||||
|             in_list = ListField(GenericLazyReferenceField()) | ||||
|             direct = GenericLazyReferenceField() | ||||
|  | ||||
|         class Ocurrence(Document): | ||||
|             in_list = ListField(GenericLazyReferenceField()) | ||||
|             in_embedded = EmbeddedDocumentField(EmbeddedOcurrence) | ||||
|             direct = GenericLazyReferenceField() | ||||
|  | ||||
|         Animal.drop_collection() | ||||
|         Ocurrence.drop_collection() | ||||
|  | ||||
|         animal1 = Animal(name="doggo").save() | ||||
|         animal2 = Animal(name="cheeta").save() | ||||
|  | ||||
|         def check_fields_type(occ): | ||||
|             assert isinstance(occ.direct, LazyReference) | ||||
|             for elem in occ.in_list: | ||||
|                 assert isinstance(elem, LazyReference) | ||||
|             assert isinstance(occ.in_embedded.direct, LazyReference) | ||||
|             for elem in occ.in_embedded.in_list: | ||||
|                 assert isinstance(elem, LazyReference) | ||||
|  | ||||
|         occ = Ocurrence( | ||||
|             in_list=[animal1, animal2], | ||||
|             in_embedded={"in_list": [animal1, animal2], "direct": animal1}, | ||||
|             direct=animal1, | ||||
|         ).save() | ||||
|         check_fields_type(occ) | ||||
|         occ.reload() | ||||
|         check_fields_type(occ) | ||||
|         animal1_ref = { | ||||
|             "_cls": "Animal", | ||||
|             "_ref": DBRef(animal1._get_collection_name(), animal1.pk), | ||||
|         } | ||||
|         animal2_ref = { | ||||
|             "_cls": "Animal", | ||||
|             "_ref": DBRef(animal2._get_collection_name(), animal2.pk), | ||||
|         } | ||||
|         occ.direct = animal1_ref | ||||
|         occ.in_list = [animal1_ref, animal2_ref] | ||||
|         occ.in_embedded.direct = animal1_ref | ||||
|         occ.in_embedded.in_list = [animal1_ref, animal2_ref] | ||||
|         check_fields_type(occ) | ||||
| @@ -1,63 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import pytest | ||||
| import six | ||||
|  | ||||
| try: | ||||
|     from bson.int64 import Int64 | ||||
| except ImportError: | ||||
|     Int64 = long | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
|  | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class TestLongField(MongoDBTestCase):
    """Tests for LongField storage, validation and query operators."""

    def test_long_field_is_considered_as_int64(self):
        """A LongField value is persisted as BSON Int64 even when it is
        small enough to fit in a regular int."""

        class TestLongFieldConsideredAsInt64(Document):
            some_long = LongField()

        doc = TestLongFieldConsideredAsInt64(some_long=42).save()
        raw = get_db().test_long_field_considered_as_int64.find()[0]
        assert isinstance(raw["some_long"], Int64)
        assert isinstance(doc.some_long, six.integer_types)

    def test_long_validation(self):
        """Out-of-range and non-numeric values must fail validation."""

        class TestDocument(Document):
            value = LongField(min_value=0, max_value=110)

        doc = TestDocument()
        doc.value = 50
        doc.validate()  # an in-range value validates cleanly

        # Below min, above max, and non-numeric are all rejected.
        for bad_value in (-1, 120, "ten"):
            doc.value = bad_value
            with pytest.raises(ValidationError):
                doc.validate()

    def test_long_ne_operator(self):
        """The __ne=None lookup matches only documents with a value set."""

        class TestDocument(Document):
            long_fld = LongField()

        TestDocument.drop_collection()

        TestDocument(long_fld=None).save()
        TestDocument(long_fld=1).save()

        assert TestDocument.objects(long_fld__ne=None).count() == 1
| @@ -1,146 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import datetime | ||||
|  | ||||
| import pytest | ||||
|  | ||||
| from mongoengine import * | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class TestMapField(MongoDBTestCase):
    """Tests for MapField declaration, validation, lookups and unicode keys."""

    def test_mapfield(self):
        """Values must match the declared type, and declaring a type is
        mandatory."""

        class Simple(Document):
            mapping = MapField(IntField())

        Simple.drop_collection()

        doc = Simple()
        doc.mapping["someint"] = 1
        doc.save()

        # A string value in an IntField mapping is rejected on save.
        with pytest.raises(ValidationError):
            doc.mapping["somestring"] = "abc"
            doc.save()

        # Declaring a MapField without a field type is itself invalid.
        with pytest.raises(ValidationError):

            class NoDeclaredType(Document):
                mapping = MapField()

    def test_complex_mapfield(self):
        """MapField supports embedded-document values, including subclasses
        of the declared embedded type."""

        class SettingBase(EmbeddedDocument):
            meta = {"allow_inheritance": True}

        class StringSetting(SettingBase):
            value = StringField()

        class IntegerSetting(SettingBase):
            value = IntField()

        class Extensible(Document):
            mapping = MapField(EmbeddedDocumentField(SettingBase))

        Extensible.drop_collection()

        doc = Extensible()
        doc.mapping["somestring"] = StringSetting(value="foo")
        doc.mapping["someint"] = IntegerSetting(value=42)
        doc.save()

        # Subclass information survives the round-trip to the database.
        fetched = Extensible.objects.get(id=doc.id)
        assert isinstance(fetched.mapping["somestring"], StringSetting)
        assert isinstance(fetched.mapping["someint"], IntegerSetting)

        # A bare int is not a SettingBase and must fail validation.
        with pytest.raises(ValidationError):
            doc.mapping["someint"] = 123
            doc.save()

    def test_embedded_mapfield_db_field(self):
        """db_field aliases apply to both the map and its embedded values."""

        class Embedded(EmbeddedDocument):
            number = IntField(default=0, db_field="i")

        class Test(Document):
            my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field="x")

        Test.drop_collection()

        doc = Test()
        doc.my_map["DICTIONARY_KEY"] = Embedded(number=1)
        doc.save()

        # Atomic updates go through the python attribute names...
        Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1)

        doc = Test.objects.get()
        assert doc.my_map["DICTIONARY_KEY"].number == 2
        # ...while the raw document uses the aliased db_field names.
        raw = self.db.test.find_one()
        assert raw["x"]["DICTIONARY_KEY"]["i"] == 2

    def test_mapfield_numerical_index(self):
        """Numeric strings are valid MapField keys."""

        class Embedded(EmbeddedDocument):
            name = StringField()

        class Test(Document):
            my_map = MapField(EmbeddedDocumentField(Embedded))

        Test.drop_collection()

        doc = Test()
        doc.my_map["1"] = Embedded(name="test")
        doc.save()
        doc.my_map["1"].name = "test updated"
        doc.save()

    def test_map_field_lookup(self):
        """Lookups traverse MapField values even when the value type has no
        custom lookup method."""

        class Action(EmbeddedDocument):
            operation = StringField()
            object = StringField()

        class Log(Document):
            name = StringField()
            visited = MapField(DateTimeField())
            actions = MapField(EmbeddedDocumentField(Action))

        Log.drop_collection()
        Log(
            name="wilson",
            visited={"friends": datetime.datetime.now()},
            actions={"friends": Action(operation="drink", object="beer")},
        ).save()

        assert Log.objects(visited__friends__exists=True).count() == 1

        matches = Log.objects(
            actions__friends__operation="drink", actions__friends__object="beer"
        ).count()
        assert matches == 1

    def test_map_field_unicode(self):
        """Non-ASCII keys and values round-trip through a MapField."""

        class Info(EmbeddedDocument):
            description = StringField()
            value_list = ListField(field=StringField())

        class BlogPost(Document):
            info_dict = MapField(field=EmbeddedDocumentField(Info))

        BlogPost.drop_collection()

        tree = BlogPost(info_dict={u"éééé": {"description": u"VALUE: éééé"}})
        tree.save()

        fetched = BlogPost.objects.get(id=tree.id)
        assert fetched.info_dict[u"éééé"].description == u"VALUE: éééé"
| @@ -1,204 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| from bson import DBRef, SON | ||||
| import pytest | ||||
|  | ||||
| from mongoengine import * | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class TestReferenceField(MongoDBTestCase):
    """Tests for ReferenceField validation, storage formats and queries."""

    def test_reference_validation(self):
        """Only saved documents of the declared type may be referenced."""

        class User(Document):
            name = StringField()

        class BlogPost(Document):
            content = StringField()
            author = ReferenceField(User)

        User.drop_collection()
        BlogPost.drop_collection()

        # The constructor only accepts a Document class or its name.
        with pytest.raises(ValidationError):
            ReferenceField(EmbeddedDocument)

        user = User(name="Test User")

        # Referencing an unsaved document must fail on save.
        post1 = BlogPost(content="Chips and gravy taste good.")
        post1.author = user
        with pytest.raises(ValidationError):
            post1.save()

        # A document of the wrong type is rejected at validation time.
        post2 = BlogPost(content="Chips and chilli taste good.")
        post1.author = post2
        with pytest.raises(ValidationError):
            post1.validate()

        # A raw ObjectId is an acceptable reference value.
        user_object_id = user.pk
        post3 = BlogPost(content="Chips and curry sauce taste good.")
        post3.author = user_object_id
        post3.save()

        # Once saved, the right document type is accepted...
        user.save()
        post1.author = user
        post1.save()

        # ...but a saved document of the wrong type still is not.
        post2.save()
        post1.author = post2
        with pytest.raises(ValidationError):
            post1.validate()

    def test_dbref_reference_fields(self):
        """With dbref=True the reference is stored as a bson DBRef."""

        class Person(Document):
            name = StringField()
            parent = ReferenceField("self", dbref=True)

        Person.drop_collection()

        john = Person(name="John").save()
        Person(name="Ross", parent=john).save()

        raw = Person._get_collection().find_one({"name": "Ross"})
        assert raw["parent"] == DBRef("person", john.pk)

        ross = Person.objects.get(name="Ross")
        assert ross.parent == john

    def test_dbref_to_mongo(self):
        """With dbref=False, a DBRef already stored in the field serializes
        back to just its id via to_mongo."""

        class Person(Document):
            name = StringField()
            parent = ReferenceField("self", dbref=False)

        steve = Person(name="Steve", parent=DBRef("person", "abcdefghijklmnop"))
        assert steve.to_mongo() == SON(
            [("name", u"Steve"), ("parent", "abcdefghijklmnop")]
        )

    def test_objectid_reference_fields(self):
        """With dbref=False the reference is stored as a plain primary key."""

        class Person(Document):
            name = StringField()
            parent = ReferenceField("self", dbref=False)

        Person.drop_collection()

        john = Person(name="John").save()
        Person(name="Ross", parent=john).save()

        raw = Person._get_collection().find_one({"name": "Ross"})
        assert raw["parent"] == john.pk

        ross = Person.objects.get(name="Ross")
        assert ross.parent == john

    def test_undefined_reference(self):
        """A ReferenceField may name a Document class defined later."""

        class Product(Document):
            name = StringField()
            company = ReferenceField("Company")

        class Company(Document):
            name = StringField()

        Product.drop_collection()
        Company.drop_collection()

        ten_gen = Company(name="10gen").save()
        mongodb = Product(name="MongoDB", company=ten_gen).save()
        me = Product(name="MongoEngine").save()

        found = Product.objects(company=ten_gen).first()
        assert found == mongodb
        assert found.company == ten_gen

        # Querying with None matches the document without a company.
        assert Product.objects(company=None).first() == me
        assert Product.objects.get(company=None) == me

    def test_reference_query_conversion(self):
        """Queries accept referenced documents whose primary key is a
        custom (non-ObjectId) type, with dbref=False."""

        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = ReferenceField(Member, dbref=False)

        Member.drop_collection()
        BlogPost.drop_collection()

        member_one = Member(user_num=1).save()
        member_two = Member(user_num=2).save()

        first_post = BlogPost(title="post 1", author=member_one).save()
        second_post = BlogPost(title="post 2", author=member_two).save()

        assert BlogPost.objects(author=member_one).first().id == first_post.id
        assert BlogPost.objects(author=member_two).first().id == second_post.id

    def test_reference_query_conversion_dbref(self):
        """Same as test_reference_query_conversion, but with dbref=True."""

        class Member(Document):
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = ReferenceField(Member, dbref=True)

        Member.drop_collection()
        BlogPost.drop_collection()

        member_one = Member(user_num=1).save()
        member_two = Member(user_num=2).save()

        first_post = BlogPost(title="post 1", author=member_one).save()
        second_post = BlogPost(title="post 2", author=member_two).save()

        assert BlogPost.objects(author=member_one).first().id == first_post.id
        assert BlogPost.objects(author=member_two).first().id == second_post.id
| @@ -1,278 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class TestSequenceField(MongoDBTestCase):
    """Tests for SequenceField: counter progression, custom sequence names,
    value decorators, embedded usage and inheritance behaviour.

    Fixes applied relative to the previous revision:
    - comparisons against ``range(...)`` / ``map(...)`` now compare against
      concrete lists (on Python 3 a list never equals a range/map object,
      so those asserts could never pass);
    - the ``("a" or "b") in xs`` / ``("a" and "b") in xs`` idioms only ever
      tested ONE of the two strings due to boolean short-circuit; both
      counters are now checked explicitly.
    """

    def test_sequence_field(self):
        """Saving documents increments the shared counter; set_next_value
        overrides it."""

        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        assert c["next"] == 10

        ids = [i.id for i in Person.objects]
        # Compare against a concrete list: on Python 3, range() is not a list.
        assert ids == list(range(1, 11))

        # Reading ids must not consume counter values.
        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        assert c["next"] == 10

        Person.id.set_next_value(1000)
        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        assert c["next"] == 1000

    def test_sequence_field_get_next_value(self):
        """get_next_value previews the next counter value, passed through
        value_decorator when one is configured."""

        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        assert Person.id.get_next_value() == 11
        self.db["mongoengine.counters"].drop()

        # With no counter document the sequence restarts at 1.
        assert Person.id.get_next_value() == 1

        class Person(Document):
            id = SequenceField(primary_key=True, value_decorator=str)
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        assert Person.id.get_next_value() == "11"
        self.db["mongoengine.counters"].drop()

        assert Person.id.get_next_value() == "1"

    def test_sequence_field_sequence_name(self):
        """sequence_name overrides the collection-derived counter id."""

        class Person(Document):
            id = SequenceField(primary_key=True, sequence_name="jelly")
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
        assert c["next"] == 10

        ids = [i.id for i in Person.objects]
        assert ids == list(range(1, 11))

        c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
        assert c["next"] == 10

        Person.id.set_next_value(1000)
        c = self.db["mongoengine.counters"].find_one({"_id": "jelly.id"})
        assert c["next"] == 1000

    def test_multiple_sequence_fields(self):
        """Multiple SequenceFields on one document keep independent counters."""

        class Person(Document):
            id = SequenceField(primary_key=True)
            counter = SequenceField()
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Person.drop_collection()

        for x in range(10):
            Person(name="Person %s" % x).save()

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        assert c["next"] == 10

        ids = [i.id for i in Person.objects]
        assert ids == list(range(1, 11))

        counters = [i.counter for i in Person.objects]
        assert counters == list(range(1, 11))

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        assert c["next"] == 10

        Person.id.set_next_value(1000)
        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        assert c["next"] == 1000

        # Each field has its own counter document and can be set separately.
        Person.counter.set_next_value(999)
        c = self.db["mongoengine.counters"].find_one({"_id": "person.counter"})
        assert c["next"] == 999

    def test_sequence_fields_reload(self):
        """Reload keeps the assigned value; clearing the field fetches a
        fresh one from the counter."""

        class Animal(Document):
            counter = SequenceField()
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Animal.drop_collection()

        a = Animal(name="Boi").save()

        assert a.counter == 1
        a.reload()
        assert a.counter == 1

        # Setting the field to None triggers allocation of the next value.
        a.counter = None
        assert a.counter == 2
        a.save()

        assert a.counter == 2

        a = Animal.objects.first()
        assert a.counter == 2
        a.reload()
        assert a.counter == 2

    def test_multiple_sequence_fields_on_docs(self):
        """Different document classes keep fully independent counters."""

        class Animal(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        class Person(Document):
            id = SequenceField(primary_key=True)
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Animal.drop_collection()
        Person.drop_collection()

        for x in range(10):
            Animal(name="Animal %s" % x).save()
            Person(name="Person %s" % x).save()

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        assert c["next"] == 10

        c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"})
        assert c["next"] == 10

        ids = [i.id for i in Person.objects]
        assert ids == list(range(1, 11))

        id = [i.id for i in Animal.objects]
        assert id == list(range(1, 11))

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        assert c["next"] == 10

        c = self.db["mongoengine.counters"].find_one({"_id": "animal.id"})
        assert c["next"] == 10

    def test_sequence_field_value_decorator(self):
        """value_decorator transforms each allocated value (here to str)."""

        class Person(Document):
            id = SequenceField(primary_key=True, value_decorator=str)
            name = StringField()

        self.db["mongoengine.counters"].drop()
        Person.drop_collection()

        for x in range(10):
            p = Person(name="Person %s" % x)
            p.save()

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        assert c["next"] == 10

        ids = [i.id for i in Person.objects]
        # list(map(...)): on Python 3 a list never equals a map object.
        assert ids == list(map(str, range(1, 11)))

        c = self.db["mongoengine.counters"].find_one({"_id": "person.id"})
        assert c["next"] == 10

    def test_embedded_sequence_field(self):
        """SequenceFields inside embedded documents share one counter keyed
        by the embedded class name."""

        class Comment(EmbeddedDocument):
            id = SequenceField()
            content = StringField(required=True)

        class Post(Document):
            title = StringField(required=True)
            comments = ListField(EmbeddedDocumentField(Comment))

        self.db["mongoengine.counters"].drop()
        Post.drop_collection()

        Post(
            title="MongoEngine",
            comments=[
                Comment(content="NoSQL Rocks"),
                Comment(content="MongoEngine Rocks"),
            ],
        ).save()
        c = self.db["mongoengine.counters"].find_one({"_id": "comment.id"})
        assert c["next"] == 2
        post = Post.objects.first()
        assert 1 == post.comments[0].id
        assert 2 == post.comments[1].id

    def test_inherited_sequencefield(self):
        """A SequenceField declared on an abstract base is shared: one
        counter keyed on the base class, owned by the base."""

        class Base(Document):
            name = StringField()
            counter = SequenceField()
            meta = {"abstract": True}

        class Foo(Base):
            pass

        class Bar(Base):
            pass

        bar = Bar(name="Bar")
        bar.save()

        foo = Foo(name="Foo")
        foo.save()

        counter_ids = self.db["mongoengine.counters"].find().distinct("_id")
        assert "base.counter" in counter_ids
        # Check BOTH child counters explicitly; the former
        # ("foo.counter" or "bar.counter") short-circuited to just the first.
        assert "foo.counter" not in counter_ids
        assert "bar.counter" not in counter_ids
        assert foo.counter != bar.counter
        assert foo._fields["counter"].owner_document == Base
        assert bar._fields["counter"].owner_document == Base

    def test_no_inherited_sequencefield(self):
        """SequenceFields declared on each subclass are independent: one
        counter per subclass, none for the base."""

        class Base(Document):
            name = StringField()
            meta = {"abstract": True}

        class Foo(Base):
            counter = SequenceField()

        class Bar(Base):
            counter = SequenceField()

        bar = Bar(name="Bar")
        bar.save()

        foo = Foo(name="Foo")
        foo.save()

        counter_ids = self.db["mongoengine.counters"].find().distinct("_id")
        assert "base.counter" not in counter_ids
        # Check BOTH subclass counters explicitly; the former
        # ("foo.counter" and "bar.counter") short-circuited to just the last.
        assert "foo.counter" in counter_ids
        assert "bar.counter" in counter_ids
        assert foo.counter == bar.counter
        assert foo._fields["counter"].owner_document == Foo
        assert bar._fields["counter"].owner_document == Bar
| @@ -1,67 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import pytest | ||||
|  | ||||
| from mongoengine import * | ||||
|  | ||||
| from tests.utils import MongoDBTestCase | ||||
|  | ||||
|  | ||||
class TestURLField(MongoDBTestCase):
    """Tests for URLField validation behaviour.

    Fix applied relative to the previous revision: ``unicode(...)`` is a
    NameError on Python 3 (the builtin was removed); the ValidationError is
    now stringified with ``str(...)``.
    """

    def test_validation(self):
        """A bare word is rejected; a full http URL with a port passes."""

        class Link(Document):
            url = URLField()

        link = Link()
        link.url = "google"
        with pytest.raises(ValidationError):
            link.validate()

        link.url = "http://www.google.com:8080"
        link.validate()

    def test_unicode_url_validation(self):
        """Ensure unicode URLs are validated properly."""

        class Link(Document):
            url = URLField()

        link = Link()
        link.url = u"http://привет.com"

        # TODO fix URL validation - this *IS* a valid URL
        # For now we just want to make sure that the error message is correct
        with pytest.raises(ValidationError) as exc_info:
            link.validate()
        assert (
            str(exc_info.value)
            == u"ValidationError (Link:None) (Invalid URL: http://\u043f\u0440\u0438\u0432\u0435\u0442.com: ['url'])"
        )

    def test_url_scheme_validation(self):
        """Schemes outside the default set are rejected; a custom scheme
        whitelist is honoured."""

        class Link(Document):
            url = URLField()

        class SchemeLink(Document):
            url = URLField(schemes=["ws", "irc"])

        link = Link()
        link.url = "ws://google.com"
        with pytest.raises(ValidationError):
            link.validate()

        scheme_link = SchemeLink()
        scheme_link.url = "ws://google.com"
        scheme_link.validate()

    def test_underscore_allowed_in_domains_names(self):
        """Underscores in (sub)domain names must not fail validation."""

        class Link(Document):
            url = URLField()

        link = Link()
        link.url = "https://san_leandro-ca.geebo.com"
        link.validate()
| @@ -1,67 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| import uuid | ||||
|  | ||||
| import pytest | ||||
|  | ||||
| from mongoengine import * | ||||
| from tests.utils import MongoDBTestCase, get_as_pymongo | ||||
|  | ||||
|  | ||||
| class Person(Document): | ||||
|     api_key = UUIDField(binary=False) | ||||
|  | ||||
|  | ||||
| class TestUUIDField(MongoDBTestCase): | ||||
|     def test_storage(self): | ||||
|         uid = uuid.uuid4() | ||||
|         person = Person(api_key=uid).save() | ||||
|         assert get_as_pymongo(person) == {"_id": person.id, "api_key": str(uid)} | ||||
|  | ||||
|     def test_field_string(self): | ||||
|         """Test UUID fields storing as String | ||||
|         """ | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         uu = uuid.uuid4() | ||||
|         Person(api_key=uu).save() | ||||
|         assert 1 == Person.objects(api_key=uu).count() | ||||
|         assert uu == Person.objects.first().api_key | ||||
|  | ||||
|         person = Person() | ||||
|         valid = (uuid.uuid4(), uuid.uuid1()) | ||||
|         for api_key in valid: | ||||
|             person.api_key = api_key | ||||
|             person.validate() | ||||
|  | ||||
|         invalid = ( | ||||
|             "9d159858-549b-4975-9f98-dd2f987c113g", | ||||
|             "9d159858-549b-4975-9f98-dd2f987c113", | ||||
|         ) | ||||
|         for api_key in invalid: | ||||
|             person.api_key = api_key | ||||
|             with pytest.raises(ValidationError): | ||||
|                 person.validate() | ||||
|  | ||||
|     def test_field_binary(self): | ||||
|         """Test UUID fields storing as Binary object.""" | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         uu = uuid.uuid4() | ||||
|         Person(api_key=uu).save() | ||||
|         assert 1 == Person.objects(api_key=uu).count() | ||||
|         assert uu == Person.objects.first().api_key | ||||
|  | ||||
|         person = Person() | ||||
|         valid = (uuid.uuid4(), uuid.uuid1()) | ||||
|         for api_key in valid: | ||||
|             person.api_key = api_key | ||||
|             person.validate() | ||||
|  | ||||
|         invalid = ( | ||||
|             "9d159858-549b-4975-9f98-dd2f987c113g", | ||||
|             "9d159858-549b-4975-9f98-dd2f987c113", | ||||
|         ) | ||||
|         for api_key in invalid: | ||||
|             person.api_key = api_key | ||||
|             with pytest.raises(ValidationError): | ||||
|                 person.validate() | ||||
| @@ -11,7 +11,7 @@ class PickleEmbedded(EmbeddedDocument): | ||||
|  | ||||
| class PickleTest(Document): | ||||
|     number = IntField() | ||||
|     string = StringField(choices=(("One", "1"), ("Two", "2"))) | ||||
|     string = StringField(choices=(('One', '1'), ('Two', '2'))) | ||||
|     embedded = EmbeddedDocumentField(PickleEmbedded) | ||||
|     lists = ListField(StringField()) | ||||
|     photo = FileField() | ||||
| @@ -19,7 +19,7 @@ class PickleTest(Document): | ||||
|  | ||||
| class NewDocumentPickleTest(Document): | ||||
|     number = IntField() | ||||
|     string = StringField(choices=(("One", "1"), ("Two", "2"))) | ||||
|     string = StringField(choices=(('One', '1'), ('Two', '2'))) | ||||
|     embedded = EmbeddedDocumentField(PickleEmbedded) | ||||
|     lists = ListField(StringField()) | ||||
|     photo = FileField() | ||||
| @@ -36,18 +36,17 @@ class PickleDynamicTest(DynamicDocument): | ||||
|  | ||||
| class PickleSignalsTest(Document): | ||||
|     number = IntField() | ||||
|     string = StringField(choices=(("One", "1"), ("Two", "2"))) | ||||
|     string = StringField(choices=(('One', '1'), ('Two', '2'))) | ||||
|     embedded = EmbeddedDocumentField(PickleEmbedded) | ||||
|     lists = ListField(StringField()) | ||||
|  | ||||
|     @classmethod | ||||
|     def post_save(self, sender, document, created, **kwargs): | ||||
|         pickle.dumps(document) | ||||
|         pickled = pickle.dumps(document) | ||||
|  | ||||
|     @classmethod | ||||
|     def post_delete(self, sender, document, **kwargs): | ||||
|         pickle.dumps(document) | ||||
|  | ||||
|         pickled = pickle.dumps(document) | ||||
|  | ||||
| signals.post_save.connect(PickleSignalsTest.post_save, sender=PickleSignalsTest) | ||||
| signals.post_delete.connect(PickleSignalsTest.post_delete, sender=PickleSignalsTest) | ||||
| @@ -58,4 +57,4 @@ class Mixin(object): | ||||
|  | ||||
|  | ||||
| class Base(Document): | ||||
|     meta = {"allow_inheritance": True} | ||||
|     meta = {'allow_inheritance': True} | ||||
|   | ||||
| @@ -0,0 +1,6 @@ | ||||
| from transform import * | ||||
| from field_list import * | ||||
| from queryset import * | ||||
| from visitor import * | ||||
| from geo import * | ||||
| from modify import * | ||||
							
								
								
									
										433
									
								
								tests/queryset/field_list.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										433
									
								
								tests/queryset/field_list.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,433 @@ | ||||
| import unittest | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.queryset import QueryFieldList | ||||
|  | ||||
| __all__ = ("QueryFieldListTest", "OnlyExcludeAllTest") | ||||
|  | ||||
|  | ||||
| class QueryFieldListTest(unittest.TestCase): | ||||
|  | ||||
|     def test_empty(self): | ||||
|         q = QueryFieldList() | ||||
|         self.assertFalse(q) | ||||
|  | ||||
|         q = QueryFieldList(always_include=['_cls']) | ||||
|         self.assertFalse(q) | ||||
|  | ||||
|     def test_include_include(self): | ||||
|         q = QueryFieldList() | ||||
|         q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY, _only_called=True) | ||||
|         self.assertEqual(q.as_dict(), {'a': 1, 'b': 1}) | ||||
|         q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) | ||||
|         self.assertEqual(q.as_dict(), {'a': 1, 'b': 1, 'c': 1}) | ||||
|  | ||||
|     def test_include_exclude(self): | ||||
|         q = QueryFieldList() | ||||
|         q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.ONLY) | ||||
|         self.assertEqual(q.as_dict(), {'a': 1, 'b': 1}) | ||||
|         q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) | ||||
|         self.assertEqual(q.as_dict(), {'a': 1}) | ||||
|  | ||||
|     def test_exclude_exclude(self): | ||||
|         q = QueryFieldList() | ||||
|         q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) | ||||
|         self.assertEqual(q.as_dict(), {'a': 0, 'b': 0}) | ||||
|         q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.EXCLUDE) | ||||
|         self.assertEqual(q.as_dict(), {'a': 0, 'b': 0, 'c': 0}) | ||||
|  | ||||
|     def test_exclude_include(self): | ||||
|         q = QueryFieldList() | ||||
|         q += QueryFieldList(fields=['a', 'b'], value=QueryFieldList.EXCLUDE) | ||||
|         self.assertEqual(q.as_dict(), {'a': 0, 'b': 0}) | ||||
|         q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) | ||||
|         self.assertEqual(q.as_dict(), {'c': 1}) | ||||
|  | ||||
|     def test_always_include(self): | ||||
|         q = QueryFieldList(always_include=['x', 'y']) | ||||
|         q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) | ||||
|         q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) | ||||
|         self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1}) | ||||
|  | ||||
|     def test_reset(self): | ||||
|         q = QueryFieldList(always_include=['x', 'y']) | ||||
|         q += QueryFieldList(fields=['a', 'b', 'x'], value=QueryFieldList.EXCLUDE) | ||||
|         q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) | ||||
|         self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'c': 1}) | ||||
|         q.reset() | ||||
|         self.assertFalse(q) | ||||
|         q += QueryFieldList(fields=['b', 'c'], value=QueryFieldList.ONLY) | ||||
|         self.assertEqual(q.as_dict(), {'x': 1, 'y': 1, 'b': 1, 'c': 1}) | ||||
|  | ||||
|     def test_using_a_slice(self): | ||||
|         q = QueryFieldList() | ||||
|         q += QueryFieldList(fields=['a'], value={"$slice": 5}) | ||||
|         self.assertEqual(q.as_dict(), {'a': {"$slice": 5}}) | ||||
|  | ||||
|  | ||||
| class OnlyExcludeAllTest(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         connect(db='mongoenginetest') | ||||
|  | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             age = IntField() | ||||
|             meta = {'allow_inheritance': True} | ||||
|  | ||||
|         Person.drop_collection() | ||||
|         self.Person = Person | ||||
|  | ||||
|     def test_mixing_only_exclude(self): | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             a = StringField() | ||||
|             b = StringField() | ||||
|             c = StringField() | ||||
|             d = StringField() | ||||
|             e = StringField() | ||||
|             f = StringField() | ||||
|  | ||||
|         include = ['a', 'b', 'c', 'd', 'e'] | ||||
|         exclude = ['d', 'e'] | ||||
|         only = ['b', 'c'] | ||||
|  | ||||
|         qs = MyDoc.objects.fields(**{i: 1 for i in include}) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), | ||||
|                          {'a': 1, 'b': 1, 'c': 1, 'd': 1, 'e': 1}) | ||||
|         qs = qs.only(*only) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) | ||||
|         qs = qs.exclude(*exclude) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) | ||||
|  | ||||
|         qs = MyDoc.objects.fields(**{i: 1 for i in include}) | ||||
|         qs = qs.exclude(*exclude) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1}) | ||||
|         qs = qs.only(*only) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) | ||||
|  | ||||
|         qs = MyDoc.objects.exclude(*exclude) | ||||
|         qs = qs.fields(**{i: 1 for i in include}) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'a': 1, 'b': 1, 'c': 1}) | ||||
|         qs = qs.only(*only) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), {'b': 1, 'c': 1}) | ||||
|  | ||||
|     def test_slicing(self): | ||||
|  | ||||
|         class MyDoc(Document): | ||||
|             a = ListField() | ||||
|             b = ListField() | ||||
|             c = ListField() | ||||
|             d = ListField() | ||||
|             e = ListField() | ||||
|             f = ListField() | ||||
|  | ||||
|         include = ['a', 'b', 'c', 'd', 'e'] | ||||
|         exclude = ['d', 'e'] | ||||
|         only = ['b', 'c'] | ||||
|  | ||||
|         qs = MyDoc.objects.fields(**{i: 1 for i in include}) | ||||
|         qs = qs.exclude(*exclude) | ||||
|         qs = qs.only(*only) | ||||
|         qs = qs.fields(slice__b=5) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), | ||||
|                          {'b': {'$slice': 5}, 'c': 1}) | ||||
|  | ||||
|         qs = qs.fields(slice__c=[5, 1]) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), | ||||
|                          {'b': {'$slice': 5}, 'c': {'$slice': [5, 1]}}) | ||||
|  | ||||
|         qs = qs.exclude('c') | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), | ||||
|                          {'b': {'$slice': 5}}) | ||||
|  | ||||
|     def test_mix_slice_with_other_fields(self): | ||||
|         class MyDoc(Document): | ||||
|             a = ListField() | ||||
|             b = ListField() | ||||
|             c = ListField() | ||||
|  | ||||
|         qs = MyDoc.objects.fields(a=1, b=0, slice__c=2) | ||||
|         self.assertEqual(qs._loaded_fields.as_dict(), | ||||
|                          {'c': {'$slice': 2}, 'a': 1}) | ||||
|  | ||||
|     def test_only(self): | ||||
|         """Ensure that QuerySet.only only returns the requested fields. | ||||
|         """ | ||||
|         person = self.Person(name='test', age=25) | ||||
|         person.save() | ||||
|  | ||||
|         obj = self.Person.objects.only('name').get() | ||||
|         self.assertEqual(obj.name, person.name) | ||||
|         self.assertEqual(obj.age, None) | ||||
|  | ||||
|         obj = self.Person.objects.only('age').get() | ||||
|         self.assertEqual(obj.name, None) | ||||
|         self.assertEqual(obj.age, person.age) | ||||
|  | ||||
|         obj = self.Person.objects.only('name', 'age').get() | ||||
|         self.assertEqual(obj.name, person.name) | ||||
|         self.assertEqual(obj.age, person.age) | ||||
|  | ||||
|         obj = self.Person.objects.only(*('id', 'name',)).get() | ||||
|         self.assertEqual(obj.name, person.name) | ||||
|         self.assertEqual(obj.age, None) | ||||
|  | ||||
|         # Check polymorphism still works | ||||
|         class Employee(self.Person): | ||||
|             salary = IntField(db_field='wage') | ||||
|  | ||||
|         employee = Employee(name='test employee', age=40, salary=30000) | ||||
|         employee.save() | ||||
|  | ||||
|         obj = self.Person.objects(id=employee.id).only('age').get() | ||||
|         self.assertTrue(isinstance(obj, Employee)) | ||||
|  | ||||
|         # Check field names are looked up properly | ||||
|         obj = Employee.objects(id=employee.id).only('salary').get() | ||||
|         self.assertEqual(obj.salary, employee.salary) | ||||
|         self.assertEqual(obj.name, None) | ||||
|  | ||||
|     def test_only_with_subfields(self): | ||||
|         class User(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|             email = StringField() | ||||
|  | ||||
|         class Comment(EmbeddedDocument): | ||||
|             title = StringField() | ||||
|             text = StringField() | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             content = StringField() | ||||
|             author = EmbeddedDocumentField(User) | ||||
|             comments = ListField(EmbeddedDocumentField(Comment)) | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         post = BlogPost(content='Had a good coffee today...') | ||||
|         post.author = User(name='Test User') | ||||
|         post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] | ||||
|         post.save() | ||||
|  | ||||
|         obj = BlogPost.objects.only('author.name',).get() | ||||
|         self.assertEqual(obj.content, None) | ||||
|         self.assertEqual(obj.author.email, None) | ||||
|         self.assertEqual(obj.author.name, 'Test User') | ||||
|         self.assertEqual(obj.comments, []) | ||||
|  | ||||
|         obj = BlogPost.objects.only('content', 'comments.title',).get() | ||||
|         self.assertEqual(obj.content, 'Had a good coffee today...') | ||||
|         self.assertEqual(obj.author, None) | ||||
|         self.assertEqual(obj.comments[0].title, 'I aggree') | ||||
|         self.assertEqual(obj.comments[1].title, 'Coffee') | ||||
|         self.assertEqual(obj.comments[0].text, None) | ||||
|         self.assertEqual(obj.comments[1].text, None) | ||||
|  | ||||
|         obj = BlogPost.objects.only('comments',).get() | ||||
|         self.assertEqual(obj.content, None) | ||||
|         self.assertEqual(obj.author, None) | ||||
|         self.assertEqual(obj.comments[0].title, 'I aggree') | ||||
|         self.assertEqual(obj.comments[1].title, 'Coffee') | ||||
|         self.assertEqual(obj.comments[0].text, 'Great post!') | ||||
|         self.assertEqual(obj.comments[1].text, 'I hate coffee') | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|     def test_exclude(self): | ||||
|         class User(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|             email = StringField() | ||||
|  | ||||
|         class Comment(EmbeddedDocument): | ||||
|             title = StringField() | ||||
|             text = StringField() | ||||
|  | ||||
|         class BlogPost(Document): | ||||
|             content = StringField() | ||||
|             author = EmbeddedDocumentField(User) | ||||
|             comments = ListField(EmbeddedDocumentField(Comment)) | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|         post = BlogPost(content='Had a good coffee today...') | ||||
|         post.author = User(name='Test User') | ||||
|         post.comments = [Comment(title='I aggree', text='Great post!'), Comment(title='Coffee', text='I hate coffee')] | ||||
|         post.save() | ||||
|  | ||||
|         obj = BlogPost.objects.exclude('author', 'comments.text').get() | ||||
|         self.assertEqual(obj.author, None) | ||||
|         self.assertEqual(obj.content, 'Had a good coffee today...') | ||||
|         self.assertEqual(obj.comments[0].title, 'I aggree') | ||||
|         self.assertEqual(obj.comments[0].text, None) | ||||
|  | ||||
|         BlogPost.drop_collection() | ||||
|  | ||||
|     def test_exclude_only_combining(self): | ||||
|         class Attachment(EmbeddedDocument): | ||||
|             name = StringField() | ||||
|             content = StringField() | ||||
|  | ||||
|         class Email(Document): | ||||
|             sender = StringField() | ||||
|             to = StringField() | ||||
|             subject = StringField() | ||||
|             body = StringField() | ||||
|             content_type = StringField() | ||||
|             attachments = ListField(EmbeddedDocumentField(Attachment)) | ||||
|  | ||||
|         Email.drop_collection() | ||||
|         email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain') | ||||
|         email.attachments = [ | ||||
|             Attachment(name='file1.doc', content='ABC'), | ||||
|             Attachment(name='file2.doc', content='XYZ'), | ||||
|         ] | ||||
|         email.save() | ||||
|  | ||||
|         obj = Email.objects.exclude('content_type').exclude('body').get() | ||||
|         self.assertEqual(obj.sender, 'me') | ||||
|         self.assertEqual(obj.to, 'you') | ||||
|         self.assertEqual(obj.subject, 'From Russia with Love') | ||||
|         self.assertEqual(obj.body, None) | ||||
|         self.assertEqual(obj.content_type, None) | ||||
|  | ||||
|         obj = Email.objects.only('sender', 'to').exclude('body', 'sender').get() | ||||
|         self.assertEqual(obj.sender, None) | ||||
|         self.assertEqual(obj.to, 'you') | ||||
|         self.assertEqual(obj.subject, None) | ||||
|         self.assertEqual(obj.body, None) | ||||
|         self.assertEqual(obj.content_type, None) | ||||
|  | ||||
|         obj = Email.objects.exclude('attachments.content').exclude('body').only('to', 'attachments.name').get() | ||||
|         self.assertEqual(obj.attachments[0].name, 'file1.doc') | ||||
|         self.assertEqual(obj.attachments[0].content, None) | ||||
|         self.assertEqual(obj.sender, None) | ||||
|         self.assertEqual(obj.to, 'you') | ||||
|         self.assertEqual(obj.subject, None) | ||||
|         self.assertEqual(obj.body, None) | ||||
|         self.assertEqual(obj.content_type, None) | ||||
|  | ||||
|         Email.drop_collection() | ||||
|  | ||||
|     def test_all_fields(self): | ||||
|  | ||||
|         class Email(Document): | ||||
|             sender = StringField() | ||||
|             to = StringField() | ||||
|             subject = StringField() | ||||
|             body = StringField() | ||||
|             content_type = StringField() | ||||
|  | ||||
|         Email.drop_collection() | ||||
|  | ||||
|         email = Email(sender='me', to='you', subject='From Russia with Love', body='Hello!', content_type='text/plain') | ||||
|         email.save() | ||||
|  | ||||
|         obj = Email.objects.exclude('content_type', 'body').only('to', 'body').all_fields().get() | ||||
|         self.assertEqual(obj.sender, 'me') | ||||
|         self.assertEqual(obj.to, 'you') | ||||
|         self.assertEqual(obj.subject, 'From Russia with Love') | ||||
|         self.assertEqual(obj.body, 'Hello!') | ||||
|         self.assertEqual(obj.content_type, 'text/plain') | ||||
|  | ||||
|         Email.drop_collection() | ||||
|  | ||||
|     def test_slicing_fields(self): | ||||
|         """Ensure that query slicing an array works. | ||||
|         """ | ||||
|         class Numbers(Document): | ||||
|             n = ListField(IntField()) | ||||
|  | ||||
|         Numbers.drop_collection() | ||||
|  | ||||
|         numbers = Numbers(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1]) | ||||
|         numbers.save() | ||||
|  | ||||
|         # first three | ||||
|         numbers = Numbers.objects.fields(slice__n=3).get() | ||||
|         self.assertEqual(numbers.n, [0, 1, 2]) | ||||
|  | ||||
|         # last three | ||||
|         numbers = Numbers.objects.fields(slice__n=-3).get() | ||||
|         self.assertEqual(numbers.n, [-3, -2, -1]) | ||||
|  | ||||
|         # skip 2, limit 3 | ||||
|         numbers = Numbers.objects.fields(slice__n=[2, 3]).get() | ||||
|         self.assertEqual(numbers.n, [2, 3, 4]) | ||||
|  | ||||
|         # skip to fifth from last, limit 4 | ||||
|         numbers = Numbers.objects.fields(slice__n=[-5, 4]).get() | ||||
|         self.assertEqual(numbers.n, [-5, -4, -3, -2]) | ||||
|  | ||||
|         # skip to fifth from last, limit 10 | ||||
|         numbers = Numbers.objects.fields(slice__n=[-5, 10]).get() | ||||
|         self.assertEqual(numbers.n, [-5, -4, -3, -2, -1]) | ||||
|  | ||||
|         # skip to fifth from last, limit 10 dict method | ||||
|         numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get() | ||||
|         self.assertEqual(numbers.n, [-5, -4, -3, -2, -1]) | ||||
|  | ||||
|     def test_slicing_nested_fields(self): | ||||
|         """Ensure that query slicing an embedded array works. | ||||
|         """ | ||||
|  | ||||
|         class EmbeddedNumber(EmbeddedDocument): | ||||
|             n = ListField(IntField()) | ||||
|  | ||||
|         class Numbers(Document): | ||||
|             embedded = EmbeddedDocumentField(EmbeddedNumber) | ||||
|  | ||||
|         Numbers.drop_collection() | ||||
|  | ||||
|         numbers = Numbers() | ||||
|         numbers.embedded = EmbeddedNumber(n=[0, 1, 2, 3, 4, 5, -5, -4, -3, -2, -1]) | ||||
|         numbers.save() | ||||
|  | ||||
|         # first three | ||||
|         numbers = Numbers.objects.fields(slice__embedded__n=3).get() | ||||
|         self.assertEqual(numbers.embedded.n, [0, 1, 2]) | ||||
|  | ||||
|         # last three | ||||
|         numbers = Numbers.objects.fields(slice__embedded__n=-3).get() | ||||
|         self.assertEqual(numbers.embedded.n, [-3, -2, -1]) | ||||
|  | ||||
|         # skip 2, limit 3 | ||||
|         numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get() | ||||
|         self.assertEqual(numbers.embedded.n, [2, 3, 4]) | ||||
|  | ||||
|         # skip to fifth from last, limit 4 | ||||
|         numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get() | ||||
|         self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2]) | ||||
|  | ||||
|         # skip to fifth from last, limit 10 | ||||
|         numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get() | ||||
|         self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) | ||||
|  | ||||
|         # skip to fifth from last, limit 10 dict method | ||||
|         numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get() | ||||
|         self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) | ||||
|  | ||||
|  | ||||
|     def test_exclude_from_subclasses_docs(self): | ||||
|  | ||||
|         class Base(Document): | ||||
|             username = StringField() | ||||
|  | ||||
|             meta = {'allow_inheritance': True} | ||||
|  | ||||
|         class Anon(Base): | ||||
|             anon = BooleanField() | ||||
|  | ||||
|         class User(Base): | ||||
|             password = StringField() | ||||
|             wibble = StringField() | ||||
|  | ||||
|         Base.drop_collection() | ||||
|         User(username="mongodb", password="secret").save() | ||||
|  | ||||
|         user = Base.objects().exclude("password", "wibble").first() | ||||
|         self.assertEqual(user.password, None) | ||||
|  | ||||
|         self.assertRaises(LookUpError, Base.objects.exclude, "made_up") | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
| @@ -3,13 +3,16 @@ import unittest | ||||
| 
 | ||||
| from mongoengine import * | ||||
| 
 | ||||
| from tests.utils import MongoDBTestCase | ||||
| from tests.utils import MongoDBTestCase, needs_mongodb_v3 | ||||
| 
 | ||||
| 
 | ||||
| class TestGeoQueries(MongoDBTestCase): | ||||
| __all__ = ("GeoQueriesTest",) | ||||
| 
 | ||||
| 
 | ||||
| class GeoQueriesTest(MongoDBTestCase): | ||||
| 
 | ||||
|     def _create_event_data(self, point_field_class=GeoPointField): | ||||
|         """Create some sample data re-used in many of the tests below.""" | ||||
| 
 | ||||
|         class Event(Document): | ||||
|             title = StringField() | ||||
|             date = DateTimeField() | ||||
| @@ -25,18 +28,15 @@ class TestGeoQueries(MongoDBTestCase): | ||||
|         event1 = Event.objects.create( | ||||
|             title="Coltrane Motion @ Double Door", | ||||
|             date=datetime.datetime.now() - datetime.timedelta(days=1), | ||||
|             location=[-87.677137, 41.909889], | ||||
|         ) | ||||
|             location=[-87.677137, 41.909889]) | ||||
|         event2 = Event.objects.create( | ||||
|             title="Coltrane Motion @ Bottom of the Hill", | ||||
|             date=datetime.datetime.now() - datetime.timedelta(days=10), | ||||
|             location=[-122.4194155, 37.7749295], | ||||
|         ) | ||||
|             location=[-122.4194155, 37.7749295]) | ||||
|         event3 = Event.objects.create( | ||||
|             title="Coltrane Motion @ Empty Bottle", | ||||
|             date=datetime.datetime.now(), | ||||
|             location=[-87.686638, 41.900474], | ||||
|         ) | ||||
|             location=[-87.686638, 41.900474]) | ||||
| 
 | ||||
|         return event1, event2, event3 | ||||
| 
 | ||||
| @@ -48,14 +48,14 @@ class TestGeoQueries(MongoDBTestCase): | ||||
|         # note that "near" will show the san francisco event, too, | ||||
|         # although it sorts to last. | ||||
|         events = self.Event.objects(location__near=[-87.67892, 41.9120459]) | ||||
|         assert events.count() == 3 | ||||
|         assert list(events) == [event1, event3, event2] | ||||
|         self.assertEqual(events.count(), 3) | ||||
|         self.assertEqual(list(events), [event1, event3, event2]) | ||||
| 
 | ||||
|         # ensure ordering is respected by "near" | ||||
|         events = self.Event.objects(location__near=[-87.67892, 41.9120459]) | ||||
|         events = events.order_by("-date") | ||||
|         assert events.count() == 3 | ||||
|         assert list(events) == [event3, event1, event2] | ||||
|         self.assertEqual(events.count(), 3) | ||||
|         self.assertEqual(list(events), [event3, event1, event2]) | ||||
| 
 | ||||
|     def test_near_and_max_distance(self): | ||||
|         """Ensure the "max_distance" operator works alongside the "near" | ||||
| @@ -65,10 +65,14 @@ class TestGeoQueries(MongoDBTestCase): | ||||
| 
 | ||||
|         # find events within 10 degrees of san francisco | ||||
|         point = [-122.415579, 37.7566023] | ||||
|         events = self.Event.objects(location__near=point, location__max_distance=10) | ||||
|         assert events.count() == 1 | ||||
|         assert events[0] == event2 | ||||
|         events = self.Event.objects(location__near=point, | ||||
|                                     location__max_distance=10) | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0], event2) | ||||
| 
 | ||||
|     # $minDistance was added in MongoDB v2.6, but continued being buggy | ||||
|     # until v3.0; skip for older versions | ||||
|     @needs_mongodb_v3 | ||||
|     def test_near_and_min_distance(self): | ||||
|         """Ensure the "min_distance" operator works alongside the "near" | ||||
|         operator. | ||||
| @@ -77,8 +81,9 @@ class TestGeoQueries(MongoDBTestCase): | ||||
| 
 | ||||
|         # find events at least 10 degrees away of san francisco | ||||
|         point = [-122.415579, 37.7566023] | ||||
|         events = self.Event.objects(location__near=point, location__min_distance=10) | ||||
|         assert events.count() == 2 | ||||
|         events = self.Event.objects(location__near=point, | ||||
|                                     location__min_distance=10) | ||||
|         self.assertEqual(events.count(), 2) | ||||
| 
 | ||||
|     def test_within_distance(self): | ||||
|         """Make sure the "within_distance" operator works.""" | ||||
| @@ -86,30 +91,34 @@ class TestGeoQueries(MongoDBTestCase): | ||||
| 
 | ||||
|         # find events within 5 degrees of pitchfork office, chicago | ||||
|         point_and_distance = [[-87.67892, 41.9120459], 5] | ||||
|         events = self.Event.objects(location__within_distance=point_and_distance) | ||||
|         assert events.count() == 2 | ||||
|         events = self.Event.objects( | ||||
|             location__within_distance=point_and_distance) | ||||
|         self.assertEqual(events.count(), 2) | ||||
|         events = list(events) | ||||
|         assert event2 not in events | ||||
|         assert event1 in events | ||||
|         assert event3 in events | ||||
|         self.assertTrue(event2 not in events) | ||||
|         self.assertTrue(event1 in events) | ||||
|         self.assertTrue(event3 in events) | ||||
| 
 | ||||
|         # find events within 10 degrees of san francisco | ||||
|         point_and_distance = [[-122.415579, 37.7566023], 10] | ||||
|         events = self.Event.objects(location__within_distance=point_and_distance) | ||||
|         assert events.count() == 1 | ||||
|         assert events[0] == event2 | ||||
|         events = self.Event.objects( | ||||
|             location__within_distance=point_and_distance) | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0], event2) | ||||
| 
 | ||||
|         # find events within 1 degree of greenpoint, broolyn, nyc, ny | ||||
|         point_and_distance = [[-73.9509714, 40.7237134], 1] | ||||
|         events = self.Event.objects(location__within_distance=point_and_distance) | ||||
|         assert events.count() == 0 | ||||
|         events = self.Event.objects( | ||||
|             location__within_distance=point_and_distance) | ||||
|         self.assertEqual(events.count(), 0) | ||||
| 
 | ||||
|         # ensure ordering is respected by "within_distance" | ||||
|         point_and_distance = [[-87.67892, 41.9120459], 10] | ||||
|         events = self.Event.objects(location__within_distance=point_and_distance) | ||||
|         events = self.Event.objects( | ||||
|             location__within_distance=point_and_distance) | ||||
|         events = events.order_by("-date") | ||||
|         assert events.count() == 2 | ||||
|         assert events[0] == event3 | ||||
|         self.assertEqual(events.count(), 2) | ||||
|         self.assertEqual(events[0], event3) | ||||
| 
 | ||||
|     def test_within_box(self): | ||||
|         """Ensure the "within_box" operator works.""" | ||||
| @@ -118,8 +127,8 @@ class TestGeoQueries(MongoDBTestCase): | ||||
|         # check that within_box works | ||||
|         box = [(-125.0, 35.0), (-100.0, 40.0)] | ||||
|         events = self.Event.objects(location__within_box=box) | ||||
|         assert events.count() == 1 | ||||
|         assert events[0].id == event2.id | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0].id, event2.id) | ||||
| 
 | ||||
|     def test_within_polygon(self): | ||||
|         """Ensure the "within_polygon" operator works.""" | ||||
| @@ -133,78 +142,87 @@ class TestGeoQueries(MongoDBTestCase): | ||||
|             (-87.656164, 41.898061), | ||||
|         ] | ||||
|         events = self.Event.objects(location__within_polygon=polygon) | ||||
|         assert events.count() == 1 | ||||
|         assert events[0].id == event1.id | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0].id, event1.id) | ||||
| 
 | ||||
|         polygon2 = [ | ||||
|             (-1.742249, 54.033586), | ||||
|             (-1.225891, 52.792797), | ||||
|             (-4.40094, 53.389881), | ||||
|             (-4.40094, 53.389881) | ||||
|         ] | ||||
|         events = self.Event.objects(location__within_polygon=polygon2) | ||||
|         assert events.count() == 0 | ||||
|         self.assertEqual(events.count(), 0) | ||||
| 
 | ||||
|     def test_2dsphere_near(self): | ||||
|         """Make sure the "near" operator works with a PointField, which | ||||
|         corresponds to a 2dsphere index. | ||||
|         """ | ||||
|         event1, event2, event3 = self._create_event_data(point_field_class=PointField) | ||||
|         event1, event2, event3 = self._create_event_data( | ||||
|             point_field_class=PointField | ||||
|         ) | ||||
| 
 | ||||
|         # find all events "near" pitchfork office, chicago. | ||||
|         # note that "near" will show the san francisco event, too, | ||||
|         # although it sorts to last. | ||||
|         events = self.Event.objects(location__near=[-87.67892, 41.9120459]) | ||||
|         assert events.count() == 3 | ||||
|         assert list(events) == [event1, event3, event2] | ||||
|         self.assertEqual(events.count(), 3) | ||||
|         self.assertEqual(list(events), [event1, event3, event2]) | ||||
| 
 | ||||
|         # ensure ordering is respected by "near" | ||||
|         events = self.Event.objects(location__near=[-87.67892, 41.9120459]) | ||||
|         events = events.order_by("-date") | ||||
|         assert events.count() == 3 | ||||
|         assert list(events) == [event3, event1, event2] | ||||
|         self.assertEqual(events.count(), 3) | ||||
|         self.assertEqual(list(events), [event3, event1, event2]) | ||||
| 
 | ||||
|     def test_2dsphere_near_and_max_distance(self): | ||||
|         """Ensure the "max_distance" operator works alongside the "near" | ||||
|         operator with a 2dsphere index. | ||||
|         """ | ||||
|         event1, event2, event3 = self._create_event_data(point_field_class=PointField) | ||||
|         event1, event2, event3 = self._create_event_data( | ||||
|             point_field_class=PointField | ||||
|         ) | ||||
| 
 | ||||
|         # find events within 10km of san francisco | ||||
|         point = [-122.415579, 37.7566023] | ||||
|         events = self.Event.objects(location__near=point, location__max_distance=10000) | ||||
|         assert events.count() == 1 | ||||
|         assert events[0] == event2 | ||||
|         events = self.Event.objects(location__near=point, | ||||
|                                     location__max_distance=10000) | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0], event2) | ||||
| 
 | ||||
|         # find events within 1km of greenpoint, broolyn, nyc, ny | ||||
|         events = self.Event.objects( | ||||
|             location__near=[-73.9509714, 40.7237134], location__max_distance=1000 | ||||
|         ) | ||||
|         assert events.count() == 0 | ||||
|         events = self.Event.objects(location__near=[-73.9509714, 40.7237134], | ||||
|                                     location__max_distance=1000) | ||||
|         self.assertEqual(events.count(), 0) | ||||
| 
 | ||||
|         # ensure ordering is respected by "near" | ||||
|         events = self.Event.objects( | ||||
|             location__near=[-87.67892, 41.9120459], location__max_distance=10000 | ||||
|             location__near=[-87.67892, 41.9120459], | ||||
|             location__max_distance=10000 | ||||
|         ).order_by("-date") | ||||
|         assert events.count() == 2 | ||||
|         assert events[0] == event3 | ||||
|         self.assertEqual(events.count(), 2) | ||||
|         self.assertEqual(events[0], event3) | ||||
| 
 | ||||
|     def test_2dsphere_geo_within_box(self): | ||||
|         """Ensure the "geo_within_box" operator works with a 2dsphere | ||||
|         index. | ||||
|         """ | ||||
|         event1, event2, event3 = self._create_event_data(point_field_class=PointField) | ||||
|         event1, event2, event3 = self._create_event_data( | ||||
|             point_field_class=PointField | ||||
|         ) | ||||
| 
 | ||||
|         # check that within_box works | ||||
|         box = [(-125.0, 35.0), (-100.0, 40.0)] | ||||
|         events = self.Event.objects(location__geo_within_box=box) | ||||
|         assert events.count() == 1 | ||||
|         assert events[0].id == event2.id | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0].id, event2.id) | ||||
| 
 | ||||
|     def test_2dsphere_geo_within_polygon(self): | ||||
|         """Ensure the "geo_within_polygon" operator works with a | ||||
|         2dsphere index. | ||||
|         """ | ||||
|         event1, event2, event3 = self._create_event_data(point_field_class=PointField) | ||||
|         event1, event2, event3 = self._create_event_data( | ||||
|             point_field_class=PointField | ||||
|         ) | ||||
| 
 | ||||
|         polygon = [ | ||||
|             (-87.694445, 41.912114), | ||||
| @@ -214,59 +232,67 @@ class TestGeoQueries(MongoDBTestCase): | ||||
|             (-87.656164, 41.898061), | ||||
|         ] | ||||
|         events = self.Event.objects(location__geo_within_polygon=polygon) | ||||
|         assert events.count() == 1 | ||||
|         assert events[0].id == event1.id | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0].id, event1.id) | ||||
| 
 | ||||
|         polygon2 = [ | ||||
|             (-1.742249, 54.033586), | ||||
|             (-1.225891, 52.792797), | ||||
|             (-4.40094, 53.389881), | ||||
|             (-4.40094, 53.389881) | ||||
|         ] | ||||
|         events = self.Event.objects(location__geo_within_polygon=polygon2) | ||||
|         assert events.count() == 0 | ||||
|         self.assertEqual(events.count(), 0) | ||||
| 
 | ||||
|     # $minDistance was added in MongoDB v2.6, but continued being buggy | ||||
|     # until v3.0; skip for older versions | ||||
|     @needs_mongodb_v3 | ||||
|     def test_2dsphere_near_and_min_max_distance(self): | ||||
|         """Ensure "min_distace" and "max_distance" operators work well | ||||
|         together with the "near" operator in a 2dsphere index. | ||||
|         """ | ||||
|         event1, event2, event3 = self._create_event_data(point_field_class=PointField) | ||||
|         event1, event2, event3 = self._create_event_data( | ||||
|             point_field_class=PointField | ||||
|         ) | ||||
| 
 | ||||
|         # ensure min_distance and max_distance combine well | ||||
|         events = self.Event.objects( | ||||
|             location__near=[-87.67892, 41.9120459], | ||||
|             location__min_distance=1000, | ||||
|             location__max_distance=10000, | ||||
|             location__max_distance=10000 | ||||
|         ).order_by("-date") | ||||
|         assert events.count() == 1 | ||||
|         assert events[0] == event3 | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0], event3) | ||||
| 
 | ||||
|         # ensure ordering is respected by "near" with "min_distance" | ||||
|         events = self.Event.objects( | ||||
|             location__near=[-87.67892, 41.9120459], location__min_distance=10000 | ||||
|             location__near=[-87.67892, 41.9120459], | ||||
|             location__min_distance=10000 | ||||
|         ).order_by("-date") | ||||
|         assert events.count() == 1 | ||||
|         assert events[0] == event2 | ||||
|         self.assertEqual(events.count(), 1) | ||||
|         self.assertEqual(events[0], event2) | ||||
| 
 | ||||
|     def test_2dsphere_geo_within_center(self): | ||||
|         """Make sure the "geo_within_center" operator works with a | ||||
|         2dsphere index. | ||||
|         """ | ||||
|         event1, event2, event3 = self._create_event_data(point_field_class=PointField) | ||||
|         event1, event2, event3 = self._create_event_data( | ||||
|             point_field_class=PointField | ||||
|         ) | ||||
| 
 | ||||
|         # find events within 5 degrees of pitchfork office, chicago | ||||
|         point_and_distance = [[-87.67892, 41.9120459], 2] | ||||
|         events = self.Event.objects(location__geo_within_center=point_and_distance) | ||||
|         assert events.count() == 2 | ||||
|         events = self.Event.objects( | ||||
|             location__geo_within_center=point_and_distance) | ||||
|         self.assertEqual(events.count(), 2) | ||||
|         events = list(events) | ||||
|         assert event2 not in events | ||||
|         assert event1 in events | ||||
|         assert event3 in events | ||||
|         self.assertTrue(event2 not in events) | ||||
|         self.assertTrue(event1 in events) | ||||
|         self.assertTrue(event3 in events) | ||||
| 
 | ||||
|     def _test_embedded(self, point_field_class): | ||||
|         """Helper test method ensuring given point field class works | ||||
|         well in an embedded document. | ||||
|         """ | ||||
| 
 | ||||
|         class Venue(EmbeddedDocument): | ||||
|             location = point_field_class() | ||||
|             name = StringField() | ||||
| @@ -280,18 +306,19 @@ class TestGeoQueries(MongoDBTestCase): | ||||
|         venue1 = Venue(name="The Rock", location=[-87.677137, 41.909889]) | ||||
|         venue2 = Venue(name="The Bridge", location=[-122.4194155, 37.7749295]) | ||||
| 
 | ||||
|         event1 = Event(title="Coltrane Motion @ Double Door", venue=venue1).save() | ||||
|         event2 = Event( | ||||
|             title="Coltrane Motion @ Bottom of the Hill", venue=venue2 | ||||
|         ).save() | ||||
|         event3 = Event(title="Coltrane Motion @ Empty Bottle", venue=venue1).save() | ||||
|         event1 = Event(title="Coltrane Motion @ Double Door", | ||||
|                        venue=venue1).save() | ||||
|         event2 = Event(title="Coltrane Motion @ Bottom of the Hill", | ||||
|                        venue=venue2).save() | ||||
|         event3 = Event(title="Coltrane Motion @ Empty Bottle", | ||||
|                        venue=venue1).save() | ||||
| 
 | ||||
|         # find all events "near" pitchfork office, chicago. | ||||
|         # note that "near" will show the san francisco event, too, | ||||
|         # although it sorts to last. | ||||
|         events = Event.objects(venue__location__near=[-87.67892, 41.9120459]) | ||||
|         assert events.count() == 3 | ||||
|         assert list(events) == [event1, event3, event2] | ||||
|         self.assertEqual(events.count(), 3) | ||||
|         self.assertEqual(list(events), [event1, event3, event2]) | ||||
| 
 | ||||
|     def test_geo_spatial_embedded(self): | ||||
|         """Make sure GeoPointField works properly in an embedded document.""" | ||||
| @@ -301,9 +328,10 @@ class TestGeoQueries(MongoDBTestCase): | ||||
|         """Make sure PointField works properly in an embedded document.""" | ||||
|         self._test_embedded(point_field_class=PointField) | ||||
| 
 | ||||
|     # Needs MongoDB > 2.6.4 https://jira.mongodb.org/browse/SERVER-14039 | ||||
|     @needs_mongodb_v3 | ||||
|     def test_spherical_geospatial_operators(self): | ||||
|         """Ensure that spherical geospatial queries are working.""" | ||||
| 
 | ||||
|         class Point(Document): | ||||
|             location = GeoPointField() | ||||
| 
 | ||||
| @@ -319,55 +347,58 @@ class TestGeoQueries(MongoDBTestCase): | ||||
|         # Finds both points because they are within 60 km of the reference | ||||
|         # point equidistant between them. | ||||
|         points = Point.objects(location__near_sphere=[-122, 37.5]) | ||||
|         assert points.count() == 2 | ||||
|         self.assertEqual(points.count(), 2) | ||||
| 
 | ||||
|         # Same behavior for _within_spherical_distance | ||||
|         points = Point.objects( | ||||
|             location__within_spherical_distance=[[-122, 37.5], 60 / earth_radius] | ||||
|             location__within_spherical_distance=[ | ||||
|                 [-122, 37.5], | ||||
|                 60 / earth_radius | ||||
|             ] | ||||
|         ) | ||||
|         assert points.count() == 2 | ||||
|         self.assertEqual(points.count(), 2) | ||||
| 
 | ||||
|         points = Point.objects( | ||||
|             location__near_sphere=[-122, 37.5], location__max_distance=60 / earth_radius | ||||
|         ) | ||||
|         assert points.count() == 2 | ||||
|         points = Point.objects(location__near_sphere=[-122, 37.5], | ||||
|                                location__max_distance=60 / earth_radius) | ||||
|         self.assertEqual(points.count(), 2) | ||||
| 
 | ||||
|         # Test query works with max_distance, being farer from one point | ||||
|         points = Point.objects( | ||||
|             location__near_sphere=[-122, 37.8], location__max_distance=60 / earth_radius | ||||
|         ) | ||||
|         points = Point.objects(location__near_sphere=[-122, 37.8], | ||||
|                                location__max_distance=60 / earth_radius) | ||||
|         close_point = points.first() | ||||
|         assert points.count() == 1 | ||||
|         self.assertEqual(points.count(), 1) | ||||
| 
 | ||||
|         # Test query works with min_distance, being farer from one point | ||||
|         points = Point.objects( | ||||
|             location__near_sphere=[-122, 37.8], location__min_distance=60 / earth_radius | ||||
|         ) | ||||
|         assert points.count() == 1 | ||||
|         points = Point.objects(location__near_sphere=[-122, 37.8], | ||||
|                                location__min_distance=60 / earth_radius) | ||||
|         self.assertEqual(points.count(), 1) | ||||
|         far_point = points.first() | ||||
|         assert close_point != far_point | ||||
|         self.assertNotEqual(close_point, far_point) | ||||
| 
 | ||||
|         # Finds both points, but orders the north point first because it's | ||||
|         # closer to the reference point to the north. | ||||
|         points = Point.objects(location__near_sphere=[-122, 38.5]) | ||||
|         assert points.count() == 2 | ||||
|         assert points[0].id == north_point.id | ||||
|         assert points[1].id == south_point.id | ||||
|         self.assertEqual(points.count(), 2) | ||||
|         self.assertEqual(points[0].id, north_point.id) | ||||
|         self.assertEqual(points[1].id, south_point.id) | ||||
| 
 | ||||
|         # Finds both points, but orders the south point first because it's | ||||
|         # closer to the reference point to the south. | ||||
|         points = Point.objects(location__near_sphere=[-122, 36.5]) | ||||
|         assert points.count() == 2 | ||||
|         assert points[0].id == south_point.id | ||||
|         assert points[1].id == north_point.id | ||||
|         self.assertEqual(points.count(), 2) | ||||
|         self.assertEqual(points[0].id, south_point.id) | ||||
|         self.assertEqual(points[1].id, north_point.id) | ||||
| 
 | ||||
|         # Finds only one point because only the first point is within 60km of | ||||
|         # the reference point to the south. | ||||
|         points = Point.objects( | ||||
|             location__within_spherical_distance=[[-122, 36.5], 60 / earth_radius] | ||||
|             location__within_spherical_distance=[ | ||||
|                 [-122, 36.5], | ||||
|                 60 / earth_radius | ||||
|             ] | ||||
|         ) | ||||
|         assert points.count() == 1 | ||||
|         assert points[0].id == south_point.id | ||||
|         self.assertEqual(points.count(), 1) | ||||
|         self.assertEqual(points[0].id, south_point.id) | ||||
| 
 | ||||
|     def test_linestring(self): | ||||
|         class Road(Document): | ||||
| @@ -381,51 +412,48 @@ class TestGeoQueries(MongoDBTestCase): | ||||
|         # near | ||||
|         point = {"type": "Point", "coordinates": [40, 5]} | ||||
|         roads = Road.objects.filter(line__near=point["coordinates"]).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(line__near=point).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(line__near={"$geometry": point}).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         # Within | ||||
|         polygon = { | ||||
|             "type": "Polygon", | ||||
|             "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], | ||||
|         } | ||||
|         polygon = {"type": "Polygon", | ||||
|                    "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} | ||||
|         roads = Road.objects.filter(line__geo_within=polygon["coordinates"]).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(line__geo_within=polygon).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(line__geo_within={"$geometry": polygon}).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         # Intersects | ||||
|         line = {"type": "LineString", "coordinates": [[40, 5], [40, 6]]} | ||||
|         line = {"type": "LineString", | ||||
|                 "coordinates": [[40, 5], [40, 6]]} | ||||
|         roads = Road.objects.filter(line__geo_intersects=line["coordinates"]).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(line__geo_intersects=line).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(line__geo_intersects={"$geometry": line}).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         polygon = { | ||||
|             "type": "Polygon", | ||||
|             "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], | ||||
|         } | ||||
|         polygon = {"type": "Polygon", | ||||
|                    "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} | ||||
|         roads = Road.objects.filter(line__geo_intersects=polygon["coordinates"]).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(line__geo_intersects=polygon).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(line__geo_intersects={"$geometry": polygon}).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|     def test_polygon(self): | ||||
|         class Road(Document): | ||||
| @@ -439,66 +467,48 @@ class TestGeoQueries(MongoDBTestCase): | ||||
|         # near | ||||
|         point = {"type": "Point", "coordinates": [40, 5]} | ||||
|         roads = Road.objects.filter(poly__near=point["coordinates"]).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(poly__near=point).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(poly__near={"$geometry": point}).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         # Within | ||||
|         polygon = { | ||||
|             "type": "Polygon", | ||||
|             "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], | ||||
|         } | ||||
|         polygon = {"type": "Polygon", | ||||
|                    "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} | ||||
|         roads = Road.objects.filter(poly__geo_within=polygon["coordinates"]).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(poly__geo_within=polygon).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(poly__geo_within={"$geometry": polygon}).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         # Intersects | ||||
|         line = {"type": "LineString", "coordinates": [[40, 5], [41, 6]]} | ||||
|         line = {"type": "LineString", | ||||
|                 "coordinates": [[40, 5], [41, 6]]} | ||||
|         roads = Road.objects.filter(poly__geo_intersects=line["coordinates"]).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(poly__geo_intersects=line).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(poly__geo_intersects={"$geometry": line}).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         polygon = { | ||||
|             "type": "Polygon", | ||||
|             "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]], | ||||
|         } | ||||
|         polygon = {"type": "Polygon", | ||||
|                    "coordinates": [[[40, 5], [40, 6], [41, 6], [41, 5], [40, 5]]]} | ||||
|         roads = Road.objects.filter(poly__geo_intersects=polygon["coordinates"]).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(poly__geo_intersects=polygon).count() | ||||
|         assert 1 == roads | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|         roads = Road.objects.filter(poly__geo_intersects={"$geometry": polygon}).count() | ||||
|         assert 1 == roads | ||||
| 
 | ||||
|     def test_aspymongo_with_only(self): | ||||
|         """Ensure as_pymongo works with only""" | ||||
| 
 | ||||
|         class Place(Document): | ||||
|             location = PointField() | ||||
| 
 | ||||
|         Place.drop_collection() | ||||
|         p = Place(location=[24.946861267089844, 60.16311983618494]) | ||||
|         p.save() | ||||
|         qs = Place.objects().only("location") | ||||
|         assert qs.as_pymongo()[0]["location"] == { | ||||
|             u"type": u"Point", | ||||
|             u"coordinates": [24.946861267089844, 60.16311983618494], | ||||
|         } | ||||
|         self.assertEqual(1, roads) | ||||
| 
 | ||||
|     def test_2dsphere_point_sets_correctly(self): | ||||
|         class Location(Document): | ||||
| @@ -506,13 +516,13 @@ class TestGeoQueries(MongoDBTestCase): | ||||
| 
 | ||||
|         Location.drop_collection() | ||||
| 
 | ||||
|         Location(loc=[1, 2]).save() | ||||
|         Location(loc=[1,2]).save() | ||||
|         loc = Location.objects.as_pymongo()[0] | ||||
|         assert loc["loc"] == {"type": "Point", "coordinates": [1, 2]} | ||||
|         self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [1, 2]}) | ||||
| 
 | ||||
|         Location.objects.update(set__loc=[2, 1]) | ||||
|         Location.objects.update(set__loc=[2,1]) | ||||
|         loc = Location.objects.as_pymongo()[0] | ||||
|         assert loc["loc"] == {"type": "Point", "coordinates": [2, 1]} | ||||
|         self.assertEqual(loc["loc"], {"type": "Point", "coordinates": [2, 1]}) | ||||
| 
 | ||||
|     def test_2dsphere_linestring_sets_correctly(self): | ||||
|         class Location(Document): | ||||
| @@ -522,11 +532,11 @@ class TestGeoQueries(MongoDBTestCase): | ||||
| 
 | ||||
|         Location(line=[[1, 2], [2, 2]]).save() | ||||
|         loc = Location.objects.as_pymongo()[0] | ||||
|         assert loc["line"] == {"type": "LineString", "coordinates": [[1, 2], [2, 2]]} | ||||
|         self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[1, 2], [2, 2]]}) | ||||
| 
 | ||||
|         Location.objects.update(set__line=[[2, 1], [1, 2]]) | ||||
|         loc = Location.objects.as_pymongo()[0] | ||||
|         assert loc["line"] == {"type": "LineString", "coordinates": [[2, 1], [1, 2]]} | ||||
|         self.assertEqual(loc["line"], {"type": "LineString", "coordinates": [[2, 1], [1, 2]]}) | ||||
| 
 | ||||
|     def test_geojson_PolygonField(self): | ||||
|         class Location(Document): | ||||
| @@ -536,18 +546,12 @@ class TestGeoQueries(MongoDBTestCase): | ||||
| 
 | ||||
|         Location(poly=[[[40, 5], [40, 6], [41, 6], [40, 5]]]).save() | ||||
|         loc = Location.objects.as_pymongo()[0] | ||||
|         assert loc["poly"] == { | ||||
|             "type": "Polygon", | ||||
|             "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]], | ||||
|         } | ||||
|         self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 5], [40, 6], [41, 6], [40, 5]]]}) | ||||
| 
 | ||||
|         Location.objects.update(set__poly=[[[40, 4], [40, 6], [41, 6], [40, 4]]]) | ||||
|         loc = Location.objects.as_pymongo()[0] | ||||
|         assert loc["poly"] == { | ||||
|             "type": "Polygon", | ||||
|             "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]], | ||||
|         } | ||||
|         self.assertEqual(loc["poly"], {"type": "Polygon", "coordinates": [[[40, 4], [40, 6], [41, 6], [40, 4]]]}) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user