Compare commits
	
		
			275 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 6affbbe865 | ||
|  | e3600ef4de | ||
|  | 6dcd7006d0 | ||
|  | 3f49923298 | ||
|  | c277be8b6b | ||
|  | 03bfd01862 | ||
|  | dcf3c86dce | ||
|  | c2d77f51bb | ||
|  | b4d87d9128 | ||
|  | 4401a309ee | ||
|  | b562e209d1 | ||
|  | 3a85422e8f | ||
|  | e45397c975 | ||
|  | 1f9ec0c888 | ||
|  | f8ee470e70 | ||
|  | d02de0798f | ||
|  | 6fe074fb13 | ||
|  | 4db339c5f4 | ||
|  | a525764359 | ||
|  | f970d5878a | ||
|  | cc0a2cbc6f | ||
|  | add0b463f5 | ||
|  | d80b1a7749 | ||
|  | 6186691259 | ||
|  | b451cc567d | ||
|  | 757ff31661 | ||
|  | 97a98f0045 | ||
|  | 8f05896bc9 | ||
|  | da7a8939df | ||
|  | b6977a88ea | ||
|  | eafbc7f20d | ||
|  | c9a5710554 | ||
|  | f10e946896 | ||
|  | 2f19b22bb2 | ||
|  | d134e11c6d | ||
|  | 63edd16a92 | ||
|  | 37740dc010 | ||
|  | 836dc96f67 | ||
|  | 49a7542b14 | ||
|  | a84ffce5a0 | ||
|  | 210b3e5192 | ||
|  | 5f1d5ea056 | ||
|  | 19a7372ff9 | ||
|  | cc5b60b004 | ||
|  | b06f9dbf8d | ||
|  | d9b8ee7895 | ||
|  | e9ff655b0e | ||
|  | 669d21a114 | ||
|  | 7e980a16d0 | ||
|  | 47df8deb58 | ||
|  | dd006a502e | ||
|  | 07d3e52e6a | ||
|  | fc1ce6d39b | ||
|  | 32d5c0c946 | ||
|  | dfabfce01b | ||
|  | 74f3f4eb15 | ||
|  | 20cb0285f0 | ||
|  | faf840f924 | ||
|  | 165bea5bb9 | ||
|  | f7515cfca8 | ||
|  | a762a10dec | ||
|  | a192029901 | ||
|  | 67182713d9 | ||
|  | e9464e32db | ||
|  | 2d6ae16912 | ||
|  | f9cd8b1841 | ||
|  | 41a698b442 | ||
|  | 9f58bc9207 | ||
|  | d36f6e7f24 | ||
|  | eeb672feb9 | ||
|  | 063a162ce0 | ||
|  | 3e4a900279 | ||
|  | 43327ea4e1 | ||
|  | 0d2e84b16b | ||
|  | 3c78757778 | ||
|  | d0245bb5ba | ||
|  | 3477b0107a | ||
|  | 8df9ff90cb | ||
|  | d6b4ca7a98 | ||
|  | 2e18199eb2 | ||
|  | e508625935 | ||
|  | 87c965edd3 | ||
|  | 06681a453f | ||
|  | 8e038dd563 | ||
|  | e537369d98 | ||
|  | 25cdf16cc0 | ||
|  | 74343841e4 | ||
|  | 3b3738b36b | ||
|  | b15c3f6a3f | ||
|  | 2459f9b0aa | ||
|  | 6ff1bd9b3c | ||
|  | 1bc2d2ec37 | ||
|  | d7fd6a4628 | ||
|  | 9236f365fa | ||
|  | 90d22c2a28 | ||
|  | c9f6e6b62a | ||
|  | 260d9377f5 | ||
|  | 22d1ce6319 | ||
|  | 6997e02476 | ||
|  | 155d79ff4d | ||
|  | 452cd125fa | ||
|  | e62c35b040 | ||
|  | d5ec3c6a31 | ||
|  | ad983dc279 | ||
|  | bb15bf8d13 | ||
|  | 94adc207ad | ||
|  | 376d1c97ab | ||
|  | 4fe87b40da | ||
|  | b10d76cf4b | ||
|  | 3bdc9a2f09 | ||
|  | 9d52e18659 | ||
|  | 653c4259ee | ||
|  | 9f5ab8149f | ||
|  | 66c6d14f7a | ||
|  | 2c0fc142a3 | ||
|  | 0da2dfd191 | ||
|  | 787fc1cd8b | ||
|  | c31488add9 | ||
|  | 31ec7907b5 | ||
|  | 12f3f8c694 | ||
|  | 79098e997e | ||
|  | dc1849bad5 | ||
|  | e2d826c412 | ||
|  | e6d796832e | ||
|  | 6f0a6df4f6 | ||
|  | 7a877a00d5 | ||
|  | e8604d100e | ||
|  | 1647441ce8 | ||
|  | 9f8d6b3a00 | ||
|  | 4b2ad25405 | ||
|  | 3ce163b1a0 | ||
|  | 7c1ee28f13 | ||
|  | 2645e43da1 | ||
|  | 59bfe551a3 | ||
|  | e2c78047b1 | ||
|  | 6a4351e44f | ||
|  | adb60ef1ac | ||
|  | 3090adac04 | ||
|  | b9253d86cc | ||
|  | ab4d4e6230 | ||
|  | 7cd38c56c6 | ||
|  | 864053615b | ||
|  | db2366f112 | ||
|  | 4defc82192 | ||
|  | 5949970a95 | ||
|  | 0ea4abda81 | ||
|  | 5c6035d636 | ||
|  | a2183e3dcc | ||
|  | 99637151b5 | ||
|  | a8e787c120 | ||
|  | 53339c7c72 | ||
|  | 3534bf7d70 | ||
|  | 1cf3989664 | ||
|  | fd296918da | ||
|  | 8ad1f03dc5 | ||
|  | fe7e17dbd5 | ||
|  | d582394a42 | ||
|  | 02ef0df019 | ||
|  | 0dfd6aa518 | ||
|  | 0b23bc9cf2 | ||
|  | f108c4288e | ||
|  | 9b9696aefd | ||
|  | 576e198ece | ||
|  | 52f85aab18 | ||
|  | ab60fd0490 | ||
|  | d79ae30f31 | ||
|  | f27debe7f9 | ||
|  | 735e043ff6 | ||
|  | 6e7f2b73cf | ||
|  | d645ce9745 | ||
|  | 7c08c140da | ||
|  | 81d402dc17 | ||
|  | 966fa12358 | ||
|  | 87792e1921 | ||
|  | 4c8296acc6 | ||
|  | 9989da07ed | ||
|  | 1c5e6a3425 | ||
|  | eedf908770 | ||
|  | 5c9ef41403 | ||
|  | 0bf2ad5b67 | ||
|  | a0e3f382cd | ||
|  | f09c39b5d7 | ||
|  | 89c67bf259 | ||
|  | ea666d4607 | ||
|  | b8af154439 | ||
|  | f594ece32a | ||
|  | 03beb6852a | ||
|  | ab9e9a3329 | ||
|  | a4b09344af | ||
|  | 8cb8aa392c | ||
|  | 3255519792 | ||
|  | 7e64bb2503 | ||
|  | 86a78402c3 | ||
|  | ba276452fb | ||
|  | 4ffa8d0124 | ||
|  | 4bc5082681 | ||
|  | 0e3c34e1da | ||
|  | 658b3784ae | ||
|  | 0526f577ff | ||
|  | bb1b9bc1d3 | ||
|  | b1eeb77ddc | ||
|  | 999d4a7676 | ||
|  | 1b80193aac | ||
|  | be8d39a48c | ||
|  | a2f3d70f28 | ||
|  | 676a7bf712 | ||
|  | e990a6c70c | ||
|  | 90fa0f6c4a | ||
|  | 22010d7d95 | ||
|  | 66279bd90f | ||
|  | 19da228855 | ||
|  | 9e67941bad | ||
|  | 0454fc74e9 | ||
|  | 2f6b1c7611 | ||
|  | f00bed6058 | ||
|  | 529c522594 | ||
|  | 2bb9493fcf | ||
|  | 839ed8a64a | ||
|  | 017a31ffd0 | ||
|  | 83b961c84d | ||
|  | fa07423ca5 | ||
|  | dd4af2df81 | ||
|  | 44bd8cb85b | ||
|  | 52d80ac23c | ||
|  | 43a5d73e14 | ||
|  | abc764951d | ||
|  | 9cc6164026 | ||
|  | 475488b9f2 | ||
|  | 95b1783834 | ||
|  | 12c8b5c0b9 | ||
|  | f99b7a811b | ||
|  | 0575abab23 | ||
|  | 9eebcf7beb | ||
|  | ed74477150 | ||
|  | 2801b38c75 | ||
|  | dc3fea875e | ||
|  | aab8c2b687 | ||
|  | 3577773af3 | ||
|  | dd023edc0f | ||
|  | 8ac9e6dc19 | ||
|  | f45d4d781d | ||
|  | c95652d6a8 | ||
|  | 97b37f75d3 | ||
|  | 95dae48778 | ||
|  | 73635033bd | ||
|  | c1619d2a62 | ||
|  | b87ef982f6 | ||
|  | 91aa90ad4a | ||
|  | 4b3cea9e78 | ||
|  | 2420b5e937 | ||
|  | f23a976bea | ||
|  | 4226cd08f1 | ||
|  | 7a230f1693 | ||
|  | a43d0d4612 | ||
|  | 78a40a0c70 | ||
|  | 2c69d8f0b0 | ||
|  | 0018c38b83 | ||
|  | 8df81571fc | ||
|  | d1add62a06 | ||
|  | c419f3379a | ||
|  | 69d57209f7 | ||
|  | 7ca81d6fb8 | ||
|  | 8a046bfa5d | ||
|  | 3628a7653c | ||
|  | 48f988acd7 | ||
|  | 6526923345 | ||
|  | 24fd1acce6 | ||
|  | cbb9235dc5 | ||
|  | 19ec2c9bc9 | ||
|  | 6459d4c0b6 | ||
|  | 1304f2721f | ||
|  | 8bde0c0e53 | ||
|  | 598ffd3e5c | ||
|  | 1a4533a9cf | ||
|  | 601f0eb168 | 
							
								
								
									
										27
									
								
								.travis.yml
									
									
									
									
									
								
							
							
						
						
									
										27
									
								
								.travis.yml
									
									
									
									
									
								
							| @@ -1,12 +1,27 @@ | ||||
| # http://travis-ci.org/#!/MongoEngine/mongoengine | ||||
| language: python | ||||
| services: mongodb | ||||
| python: | ||||
|     - 2.6 | ||||
|     - 2.7 | ||||
|     - "2.5" | ||||
|     - "2.6" | ||||
|     - "2.7" | ||||
|     - "3.2" | ||||
|     - "3.3" | ||||
| env: | ||||
|   - PYMONGO=dev | ||||
|   - PYMONGO=2.5 | ||||
|   - PYMONGO=2.4.2 | ||||
| install: | ||||
|     - sudo apt-get install zlib1g zlib1g-dev | ||||
|     - sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/ | ||||
|     - pip install PIL --use-mirrors ; true | ||||
|     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then cp /usr/lib/*/libz.so $VIRTUAL_ENV/lib/; fi | ||||
|     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install pil --use-mirrors ; true; fi | ||||
|     - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi | ||||
|     - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi | ||||
|     - python setup.py install | ||||
| script: | ||||
|     - python setup.py test | ||||
|     - python setup.py test | ||||
| notifications: | ||||
|   irc: "irc.freenode.org#mongoengine" | ||||
| branches: | ||||
|   only: | ||||
|     - master | ||||
|     - "0.8" | ||||
|   | ||||
							
								
								
									
										29
									
								
								AUTHORS
									
									
									
									
									
								
							
							
						
						
									
										29
									
								
								AUTHORS
									
									
									
									
									
								
							| @@ -8,6 +8,7 @@ Florian Schlachter <flori@n-schlachter.de> | ||||
| Steve Challis <steve@stevechallis.com> | ||||
| Wilson Júnior <wilsonpjunior@gmail.com> | ||||
| Dan Crosta https://github.com/dcrosta | ||||
| Laine Herron https://github.com/LaineHerron | ||||
|  | ||||
| CONTRIBUTORS | ||||
|  | ||||
| @@ -105,7 +106,7 @@ that much better: | ||||
|  * Adam Reeve | ||||
|  * Anthony Nemitz | ||||
|  * deignacio | ||||
|  * shaunduncan | ||||
|  * Shaun Duncan | ||||
|  * Meir Kriheli | ||||
|  * Andrey Fedoseev | ||||
|  * aparajita | ||||
| @@ -113,4 +114,28 @@ that much better: | ||||
|  * Alexander Koshelev | ||||
|  * Jaime Irurzun | ||||
|  * Alexandre González | ||||
|  * Thomas Steinacher | ||||
|  * Thomas Steinacher | ||||
|  * Tommi Komulainen | ||||
|  * Peter Landry | ||||
|  * biszkoptwielki | ||||
|  * Anton Kolechkin | ||||
|  * Sergey Nikitin | ||||
|  * psychogenic | ||||
|  * Stefan Wójcik | ||||
|  * dimonb | ||||
|  * Garry Polley | ||||
|  * Adrian Scott | ||||
|  * Peter Teichman | ||||
|  * Jakub Kot | ||||
|  * Jorge Bastida | ||||
|  * Aleksandr Sorokoumov | ||||
|  * Yohan Graterol | ||||
|  * bool-dev | ||||
|  * Russ Weeks | ||||
|  * Paul Swartz | ||||
|  * Sundar Raman | ||||
|  * Benoit Louy | ||||
|  * lraucy | ||||
|  * hellysmile | ||||
|  * Jaepil Jeong | ||||
|  * Daniil Sharou | ||||
|   | ||||
							
								
								
									
										61
									
								
								CONTRIBUTING.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										61
									
								
								CONTRIBUTING.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,61 @@ | ||||
| Contributing to MongoEngine | ||||
| =========================== | ||||
|  | ||||
| MongoEngine has a large `community | ||||
| <https://raw.github.com/MongoEngine/mongoengine/master/AUTHORS>`_ and | ||||
| contributions are always encouraged. Contributions can be as simple as | ||||
| minor tweaks to the documentation. Please read these guidelines before | ||||
| sending a pull request. | ||||
|  | ||||
| Bugfixes and New Features | ||||
| ------------------------- | ||||
|  | ||||
| Before starting to write code, look for existing `tickets | ||||
| <https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one | ||||
| <https://github.com/MongoEngine/mongoengine/issues>`_ for your specific | ||||
| issue or feature request. That way you avoid working on something | ||||
| that might not be of interest or that has already been addressed.  If in doubt | ||||
| post to the `user group <http://groups.google.com/group/mongoengine-users>` | ||||
|  | ||||
| Supported Interpreters | ||||
| ---------------------- | ||||
|  | ||||
| PyMongo supports CPython 2.5 and newer. Language | ||||
| features not supported by all interpreters can not be used. | ||||
| Please also ensure that your code is properly converted by | ||||
| `2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support. | ||||
|  | ||||
| Style Guide | ||||
| ----------- | ||||
|  | ||||
| MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_ | ||||
| including 4 space indents and 79 character line limits. | ||||
|  | ||||
| Testing | ||||
| ------- | ||||
|  | ||||
| All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_ | ||||
| and any pull requests are automatically tested by Travis. Any pull requests | ||||
| without tests will take longer to be integrated and might be refused. | ||||
|  | ||||
| General Guidelines | ||||
| ------------------ | ||||
|  | ||||
| - Avoid backward breaking changes if at all possible. | ||||
| - Write inline documentation for new classes and methods. | ||||
| - Write tests and make sure they pass (make sure you have a mongod | ||||
|   running on the default port, then execute ``python setup.py test`` | ||||
|   from the cmd line to run the test suite). | ||||
| - Add yourself to AUTHORS.rst :) | ||||
|  | ||||
| Documentation | ||||
| ------------- | ||||
|  | ||||
| To contribute to the `API documentation | ||||
| <http://docs.mongoengine.org/en/latest/apireference.html>`_ | ||||
| just make your changes to the inline documentation of the appropriate | ||||
| `source code <https://github.com/MongoEngine/mongoengine>`_ or `rst file | ||||
| <https://github.com/MongoEngine/mongoengine/tree/master/docs>`_ in a | ||||
| branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_. | ||||
| You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_ | ||||
| button. | ||||
							
								
								
									
										8
									
								
								LICENSE
									
									
									
									
									
								
							
							
						
						
									
										8
									
								
								LICENSE
									
									
									
									
									
								
							| @@ -1,5 +1,5 @@ | ||||
| Copyright (c) 2009-2010 Harry Marr | ||||
|   | ||||
| Copyright (c) 2009 See AUTHORS | ||||
|  | ||||
| Permission is hereby granted, free of charge, to any person | ||||
| obtaining a copy of this software and associated documentation | ||||
| files (the "Software"), to deal in the Software without | ||||
| @@ -8,10 +8,10 @@ copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| copies of the Software, and to permit persons to whom the | ||||
| Software is furnished to do so, subject to the following | ||||
| conditions: | ||||
|   | ||||
|  | ||||
| The above copyright notice and this permission notice shall be | ||||
| included in all copies or substantial portions of the Software. | ||||
|   | ||||
|  | ||||
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, | ||||
| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES | ||||
| OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND | ||||
|   | ||||
							
								
								
									
										17
									
								
								README.rst
									
									
									
									
									
								
							
							
						
						
									
										17
									
								
								README.rst
									
									
									
									
									
								
							| @@ -2,6 +2,7 @@ | ||||
| MongoEngine | ||||
| =========== | ||||
| :Info: MongoEngine is an ORM-like layer on top of PyMongo. | ||||
| :Repository: https://github.com/MongoEngine/mongoengine | ||||
| :Author: Harry Marr (http://github.com/hmarr) | ||||
| :Maintainer: Ross Lawley (http://github.com/rozza) | ||||
|  | ||||
| @@ -13,7 +14,7 @@ About | ||||
| MongoEngine is a Python Object-Document Mapper for working with MongoDB. | ||||
| Documentation available at http://mongoengine-odm.rtfd.org - there is currently | ||||
| a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide | ||||
| <http://readthedocs.org/docs/mongoengine-odm/en/latest/userguide.html>`_ and an `API reference | ||||
| <https://mongoengine-odm.readthedocs.org/en/latest/guide/index.html>`_ and an `API reference | ||||
| <http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_. | ||||
|  | ||||
| Installation | ||||
| @@ -62,11 +63,6 @@ Some simple examples of what MongoEngine code looks like:: | ||||
|     ...         print 'Link:', post.url | ||||
|     ...     print | ||||
|     ... | ||||
|     === Using MongoEngine === | ||||
|     See the tutorial | ||||
|  | ||||
|     === MongoEngine Docs === | ||||
|     Link: hmarr.com/mongoengine | ||||
|  | ||||
|     >>> len(BlogPost.objects) | ||||
|     2 | ||||
| @@ -84,7 +80,7 @@ Some simple examples of what MongoEngine code looks like:: | ||||
| Tests | ||||
| ===== | ||||
| To run the test suite, ensure you are running a local instance of MongoDB on | ||||
| the standard port, and run ``python setup.py test``. | ||||
| the standard port, and run: ``python setup.py test``. | ||||
|  | ||||
| Community | ||||
| ========= | ||||
| @@ -92,11 +88,8 @@ Community | ||||
|   <http://groups.google.com/group/mongoengine-users>`_ | ||||
| - `MongoEngine Developers mailing list | ||||
|   <http://groups.google.com/group/mongoengine-dev>`_ | ||||
| - `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_ | ||||
| - `#mongoengine IRC channel <http://webchat.freenode.net/?channels=mongoengine>`_ | ||||
|  | ||||
| Contributing | ||||
| ============ | ||||
| The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to | ||||
| contribute to the project, fork it on GitHub and send a pull request, all | ||||
| contributions and suggestions are welcome! | ||||
|  | ||||
| We welcome contributions! see  the`Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_ | ||||
|   | ||||
							
								
								
									
										49
									
								
								benchmark.py
									
									
									
									
									
								
							
							
						
						
									
										49
									
								
								benchmark.py
									
									
									
									
									
								
							| @@ -28,47 +28,64 @@ def main(): | ||||
|  | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - Pymongo | ||||
|     1.1141769886 | ||||
|     3.86744189262 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine | ||||
|     2.37724113464 | ||||
|     6.23374891281 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||
|     1.92479610443 | ||||
|     5.33027005196 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||
|     pass - No Cascade | ||||
|  | ||||
|     0.5.X | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - Pymongo | ||||
|     1.10552310944 | ||||
|     3.89597702026 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine | ||||
|     16.5169169903 | ||||
|     21.7735359669 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||
|     14.9446101189 | ||||
|     19.8670389652 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||
|     14.912801981 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, force=True | ||||
|     14.9617750645 | ||||
|     pass - No Cascade | ||||
|  | ||||
|     Performance | ||||
|     0.6.X | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - Pymongo | ||||
|     1.10072994232 | ||||
|     3.81559205055 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine | ||||
|     5.27341103554 | ||||
|     10.0446798801 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||
|     4.49365401268 | ||||
|     9.51354718208 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||
|     4.43459296227 | ||||
|     9.02567505836 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, force=True | ||||
|     4.40114378929 | ||||
|     8.44933390617 | ||||
|  | ||||
|     0.7.X | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - Pymongo | ||||
|     3.78801012039 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine | ||||
|     9.73050498962 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||
|     8.33456707001 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||
|     8.37778115273 | ||||
|     ---------------------------------------------------------------------------------------------------- | ||||
|     Creating 10000 dictionaries - MongoEngine, force=True | ||||
|     8.36906409264 | ||||
|     """ | ||||
|  | ||||
|     setup = """ | ||||
|   | ||||
| @@ -2,6 +2,119 @@ | ||||
| Changelog | ||||
| ========= | ||||
|  | ||||
| Changes in 0.7.10 | ||||
| ================= | ||||
| - Fix UnicodeEncodeError for dbref (#278) | ||||
| - Allow construction using positional parameters (#268) | ||||
| - Updated EmailField length to support long domains (#243) | ||||
| - Added 64-bit integer support (#251) | ||||
| - Added Django sessions TTL support (#224) | ||||
| - Fixed issue with numerical keys in MapField(EmbeddedDocumentField()) (#240) | ||||
| - Fixed clearing _changed_fields for complex nested embedded documents (#237, #239, #242) | ||||
| - Added "id" back to _data dictionary (#255) | ||||
| - Only mark a field as changed if the value has changed (#258) | ||||
| - Explicitly check for Document instances when dereferencing (#261) | ||||
| - Fixed order_by chaining issue (#265) | ||||
| - Added dereference support for tuples (#250) | ||||
| - Resolve field name to db field name when using distinct(#260, #264, #269) | ||||
| - Added kwargs to doc.save to help interop with django (#223, #270) | ||||
| - Fixed cloning querysets in PY3 | ||||
| - Int fields no longer unset in save when changed to 0 (#272) | ||||
| - Fixed ReferenceField query chaining bug fixed (#254) | ||||
|  | ||||
| Changes in 0.7.9 | ||||
| ================ | ||||
| - Better fix handling for old style _types | ||||
| - Embedded SequenceFields follow collection naming convention | ||||
|  | ||||
| Changes in 0.7.8 | ||||
| ================ | ||||
| - Fix sequence fields in embedded documents (MongoEngine/mongoengine#166) | ||||
| - Fix query chaining with .order_by() (MongoEngine/mongoengine#176) | ||||
| - Added optional encoding and collection config for Django sessions (MongoEngine/mongoengine#180, MongoEngine/mongoengine#181, MongoEngine/mongoengine#183) | ||||
| - Fixed EmailField so can add extra validation (MongoEngine/mongoengine#173, MongoEngine/mongoengine#174, MongoEngine/mongoengine#187) | ||||
| - Fixed bulk inserts can now handle custom pk's (MongoEngine/mongoengine#192) | ||||
| - Added as_pymongo method to return raw or cast results from pymongo (MongoEngine/mongoengine#193) | ||||
|  | ||||
| Changes in 0.7.7 | ||||
| ================ | ||||
| - Fix handling for old style _types | ||||
|  | ||||
| Changes in 0.7.6 | ||||
| ================ | ||||
| - Unicode fix for repr (MongoEngine/mongoengine#133) | ||||
| - Allow updates with match operators (MongoEngine/mongoengine#144) | ||||
| - Updated URLField - now can have a override the regex (MongoEngine/mongoengine#136) | ||||
| - Allow Django AuthenticationBackends to work with Django user (hmarr/mongoengine#573) | ||||
| - Fixed reload issue with ReferenceField where dbref=False (MongoEngine/mongoengine#138) | ||||
|  | ||||
| Changes in 0.7.5 | ||||
| ================ | ||||
| - ReferenceFields with dbref=False use ObjectId instead of strings (MongoEngine/mongoengine#134) | ||||
|   See ticket for upgrade notes (https://github.com/MongoEngine/mongoengine/issues/134) | ||||
|  | ||||
| Changes in 0.7.4 | ||||
| ================ | ||||
| - Fixed index inheritance issues - firmed up testcases (MongoEngine/mongoengine#123) (MongoEngine/mongoengine#125) | ||||
|  | ||||
| Changes in 0.7.3 | ||||
| ================ | ||||
| - Reverted EmbeddedDocuments meta handling - now can turn off inheritance (MongoEngine/mongoengine#119) | ||||
|  | ||||
| Changes in 0.7.2 | ||||
| ================ | ||||
| - Update index spec generation so its not destructive (MongoEngine/mongoengine#113) | ||||
|  | ||||
| Changes in 0.7.1 | ||||
| ================= | ||||
| - Fixed index spec inheritance (MongoEngine/mongoengine#111) | ||||
|  | ||||
| Changes in 0.7.0 | ||||
| ================= | ||||
| - Updated queryset.delete so you can use with skip / limit (MongoEngine/mongoengine#107) | ||||
| - Updated index creation allows kwargs to be passed through refs (MongoEngine/mongoengine#104) | ||||
| - Fixed Q object merge edge case (MongoEngine/mongoengine#109) | ||||
| - Fixed reloading on sharded documents (hmarr/mongoengine#569) | ||||
| - Added NotUniqueError for duplicate keys (MongoEngine/mongoengine#62) | ||||
| - Added custom collection / sequence naming for SequenceFields (MongoEngine/mongoengine#92) | ||||
| - Fixed UnboundLocalError in composite index with pk field (MongoEngine/mongoengine#88) | ||||
| - Updated ReferenceField's to optionally store ObjectId strings | ||||
|   this will become the default in 0.8 (MongoEngine/mongoengine#89) | ||||
| - Added FutureWarning - save will default to `cascade=False` in 0.8 | ||||
| - Added example of indexing embedded document fields (MongoEngine/mongoengine#75) | ||||
| - Fixed ImageField resizing when forcing size (MongoEngine/mongoengine#80) | ||||
| - Add flexibility for fields handling bad data (MongoEngine/mongoengine#78) | ||||
| - Embedded Documents no longer handle meta definitions | ||||
| - Use weakref proxies in base lists / dicts (MongoEngine/mongoengine#74) | ||||
| - Improved queryset filtering (hmarr/mongoengine#554) | ||||
| - Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561) | ||||
| - Fixed abstract classes and shard keys (MongoEngine/mongoengine#64) | ||||
| - Fixed Python 2.5 support | ||||
| - Added Python 3 support (thanks to Laine Heron) | ||||
|  | ||||
| Changes in 0.6.20 | ||||
| ================= | ||||
| - Added support for distinct and db_alias (MongoEngine/mongoengine#59) | ||||
| - Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554) | ||||
| - Fixed BinaryField lookup re (MongoEngine/mongoengine#48) | ||||
|  | ||||
| Changes in 0.6.19 | ||||
| ================= | ||||
|  | ||||
| - Added Binary support to UUID (MongoEngine/mongoengine#47) | ||||
| - Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46) | ||||
| - Fixed BinaryField python value issue (MongoEngine/mongoengine#48) | ||||
| - Fixed SequenceField non numeric value lookup (MongoEngine/mongoengine#41) | ||||
| - Fixed queryset manager issue (MongoEngine/mongoengine#52) | ||||
| - Fixed FileField comparision (hmarr/mongoengine#547) | ||||
|  | ||||
| Changes in 0.6.18 | ||||
| ================= | ||||
| - Fixed recursion loading bug in _get_changed_fields | ||||
|  | ||||
| Changes in 0.6.17 | ||||
| ================= | ||||
| - Fixed issue with custom queryset manager expecting explict variable names | ||||
|  | ||||
| Changes in 0.6.16 | ||||
| ================= | ||||
| @@ -27,7 +140,7 @@ Changes in 0.6.14 | ||||
| - Added support for add_to_set and each | ||||
|  | ||||
| Changes in 0.6.13 | ||||
| ================ | ||||
| ================= | ||||
| - Fixed EmbeddedDocument db_field validation issue | ||||
| - Fixed StringField unicode issue | ||||
| - Fixes __repr__ modifying the cursor | ||||
| @@ -63,7 +176,7 @@ Changes in 0.6.8 | ||||
| ================ | ||||
| - Fixed FileField losing reference when no default set | ||||
| - Removed possible race condition from FileField (grid_file) | ||||
| - Added assignment to save, can now do: b = MyDoc(**kwargs).save() | ||||
| - Added assignment to save, can now do: `b = MyDoc(**kwargs).save()` | ||||
| - Added support for pull operations on nested EmbeddedDocuments | ||||
| - Added support for choices with GenericReferenceFields | ||||
| - Added support for choices with GenericEmbeddedDocumentFields | ||||
|   | ||||
| @@ -10,6 +10,16 @@ In your **settings.py** file, ignore the standard database settings (unless you | ||||
| also plan to use the ORM in your project), and instead call | ||||
| :func:`~mongoengine.connect` somewhere in the settings module. | ||||
|  | ||||
| .. note :: | ||||
|    If you are not using another Database backend make sure you add  a dummy | ||||
|    backend, by adding the following to ``settings.py``:: | ||||
|  | ||||
|         DATABASES = { | ||||
|             'default': { | ||||
|                 'ENGINE': 'django.db.backends.dummy' | ||||
|             } | ||||
|         } | ||||
|  | ||||
| Authentication | ||||
| ============== | ||||
| MongoEngine includes a Django authentication backend, which uses MongoDB. The | ||||
| @@ -45,6 +55,9 @@ into you settings module:: | ||||
|  | ||||
|     SESSION_ENGINE = 'mongoengine.django.sessions' | ||||
|  | ||||
| Django provides session cookie, which expires after ```SESSION_COOKIE_AGE``` seconds, but doesnt delete cookie at sessions backend, so ``'mongoengine.django.sessions'`` supports  `mongodb TTL | ||||
| <http://docs.mongodb.org/manual/tutorial/expire-data/>`_. | ||||
|  | ||||
| .. versionadded:: 0.2.1 | ||||
|  | ||||
| Storage | ||||
|   | ||||
| @@ -259,6 +259,35 @@ as the constructor's argument:: | ||||
|         content = StringField() | ||||
|  | ||||
|  | ||||
| .. _one-to-many-with-listfields: | ||||
|  | ||||
| One to Many with ListFields | ||||
| ''''''''''''''''''''''''''' | ||||
|  | ||||
| If you are implementing a one to many relationship via a list of references, | ||||
| then the references are stored as DBRefs and to query you need to pass an | ||||
| instance of the object to the query:: | ||||
|  | ||||
|     class User(Document): | ||||
|         name = StringField() | ||||
|  | ||||
|     class Page(Document): | ||||
|         content = StringField() | ||||
|         authors = ListField(ReferenceField(User)) | ||||
|  | ||||
|     bob = User(name="Bob Jones").save() | ||||
|     john = User(name="John Smith").save() | ||||
|  | ||||
|     Page(content="Test Page", authors=[bob, john]).save() | ||||
|     Page(content="Another Page", authors=[john]).save() | ||||
|  | ||||
|     # Find all pages Bob authored | ||||
|     Page.objects(authors__in=[bob]) | ||||
|  | ||||
|     # Find all pages that both Bob and John have authored | ||||
|     Page.objects(authors__all=[bob, john]) | ||||
|  | ||||
|  | ||||
| Dealing with deletion of referred documents | ||||
| ''''''''''''''''''''''''''''''''''''''''''' | ||||
| By default, MongoDB doesn't check the integrity of your data, so deleting | ||||
| @@ -315,6 +344,10 @@ Its value can take any of the following constants: | ||||
|    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. | ||||
|  | ||||
|  | ||||
| .. warning:: | ||||
|    Signals are not triggered when doing cascading updates / deletes - if this | ||||
|    is required you must manually handle the update / delete. | ||||
|  | ||||
| Generic reference fields | ||||
| '''''''''''''''''''''''' | ||||
| A second kind of reference field also exists, | ||||
| @@ -436,13 +469,18 @@ If a dictionary is passed then the following options are available: | ||||
|     Whether the index should be sparse. | ||||
|  | ||||
| :attr:`unique` (Default: False) | ||||
|     Whether the index should be sparse. | ||||
|     Whether the index should be unique. | ||||
|  | ||||
| .. note :: | ||||
|  | ||||
|     To index embedded files / dictionary fields use 'dot' notation eg: | ||||
|     `rank.title` | ||||
|  | ||||
| .. warning:: | ||||
|  | ||||
|  | ||||
|    Inheritance adds extra indices. | ||||
|    If don't need inheritance for a document turn inheritance off - see :ref:`document-inheritance`. | ||||
|     Inheritance adds extra indices. | ||||
|     If you don't need inheritance for a document, turn inheritance off - | ||||
|     see :ref:`document-inheritance`. | ||||
|  | ||||
|  | ||||
| Geospatial indexes | ||||
|   | ||||
| @@ -232,7 +232,7 @@ custom manager methods as you like:: | ||||
|     BlogPost(title='test1', published=False).save() | ||||
|     BlogPost(title='test2', published=True).save() | ||||
|     assert len(BlogPost.objects) == 2 | ||||
|     assert len(BlogPost.live_posts) == 1 | ||||
|     assert len(BlogPost.live_posts()) == 1 | ||||
|  | ||||
| Custom QuerySets | ||||
| ================ | ||||
| @@ -243,11 +243,16 @@ a document, set ``queryset_class`` to the custom class in a | ||||
| :class:`~mongoengine.Document`\ s ``meta`` dictionary:: | ||||
|  | ||||
|     class AwesomerQuerySet(QuerySet): | ||||
|         pass | ||||
|  | ||||
|         def get_awesome(self): | ||||
|             return self.filter(awesome=True) | ||||
|  | ||||
|     class Page(Document): | ||||
|         meta = {'queryset_class': AwesomerQuerySet} | ||||
|  | ||||
|     # To call: | ||||
|     Page.objects.get_awesome() | ||||
|  | ||||
| .. versionadded:: 0.4 | ||||
|  | ||||
| Aggregation | ||||
|   | ||||
| @@ -50,4 +50,11 @@ Example usage:: | ||||
|     signals.post_save.connect(Author.post_save, sender=Author) | ||||
|  | ||||
|  | ||||
| ReferenceFields and signals | ||||
| --------------------------- | ||||
|  | ||||
| Currently `reverse_delete_rules` do not trigger signals on the other part of | ||||
| the relationship.  If this is required you must manually handle the | ||||
| reverse deletion. | ||||
|  | ||||
| .. _blinker: http://pypi.python.org/pypi/blinker | ||||
|   | ||||
| @@ -34,10 +34,10 @@ To get help with using MongoEngine, use the `MongoEngine Users mailing list | ||||
| Contributing | ||||
| ------------ | ||||
|  | ||||
| The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ and | ||||
| The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ and | ||||
| contributions are always encouraged. Contributions can be as simple as | ||||
| minor tweaks to this documentation. To contribute, fork the project on | ||||
| `GitHub <http://github.com/hmarr/mongoengine>`_ and send a | ||||
| `GitHub <http://github.com/MongoEngine/mongoengine>`_ and send a | ||||
| pull request. | ||||
|  | ||||
| Also, you can join the developers' `mailing list | ||||
|   | ||||
							
								
								
									
										100
									
								
								docs/upgrade.rst
									
									
									
									
									
								
							
							
						
						
									
										100
									
								
								docs/upgrade.rst
									
									
									
									
									
								
							| @@ -2,18 +2,86 @@ | ||||
| Upgrading | ||||
| ========= | ||||
|  | ||||
| 0.6 to 0.7 | ||||
| ========== | ||||
|  | ||||
| Cascade saves | ||||
| ------------- | ||||
|  | ||||
| Saves will raise a `FutureWarning` if they cascade and cascade hasn't been set | ||||
| to True.  This is because in 0.8 it will default to False.  If you require | ||||
| cascading saves then either set it in the `meta` or pass | ||||
| via `save` eg :: | ||||
|  | ||||
|     # At the class level: | ||||
|     class Person(Document): | ||||
|         meta = {'cascade': True} | ||||
|  | ||||
|     # Or in code: | ||||
|     my_document.save(cascade=True) | ||||
|  | ||||
| .. note :: | ||||
|     Remember: cascading saves **do not** cascade through lists. | ||||
|  | ||||
| ReferenceFields | ||||
| --------------- | ||||
|  | ||||
| ReferenceFields now can store references as ObjectId strings instead of DBRefs. | ||||
| This will become the default in 0.8 and if `dbref` is not set a `FutureWarning` | ||||
| will be raised. | ||||
|  | ||||
|  | ||||
| To explicitly continue to use DBRefs change the `dbref` flag | ||||
| to True :: | ||||
|  | ||||
|    class Person(Document): | ||||
|        groups = ListField(ReferenceField(Group, dbref=True)) | ||||
|  | ||||
| To migrate to using strings instead of DBRefs you will have to manually | ||||
| migrate :: | ||||
|  | ||||
|         # Step 1 - Migrate the model definition | ||||
|         class Group(Document): | ||||
|             author = ReferenceField(User, dbref=False) | ||||
|             members = ListField(ReferenceField(User, dbref=False)) | ||||
|  | ||||
|         # Step 2 - Migrate the data | ||||
|         for g in Group.objects(): | ||||
|             g.author = g.author | ||||
|             g.members = g.members | ||||
|             g.save() | ||||
|  | ||||
|  | ||||
| item_frequencies | ||||
| ---------------- | ||||
|  | ||||
| In the 0.6 series we added support for null / zero / false values in | ||||
| item_frequencies.  A side effect was to return keys as the type they are | ||||
| stored in rather than as string representations.  Your code may need to be | ||||
| updated to handle native types rather than string keys for the results of | ||||
| item frequency queries. | ||||
|  | ||||
| BinaryFields | ||||
| ------------ | ||||
|  | ||||
| Binary fields have been updated so that they are native binary types.  If you | ||||
| previously were doing `str` comparisons with binary field values you will have | ||||
| to update and wrap the value in a `str`. | ||||
|  | ||||
| 0.5 to 0.6 | ||||
| ========== | ||||
|  | ||||
| Embedded Documents - if you had a `pk` field you will have to rename it from `_id` | ||||
| to `pk` as pk is no longer a property of Embedded Documents. | ||||
| Embedded Documents - if you had a `pk` field you will have to rename it from | ||||
| `_id` to `pk` as pk is no longer a property of Embedded Documents. | ||||
|  | ||||
| Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw | ||||
| an InvalidDocument error as they aren't currently supported. | ||||
|  | ||||
| Document._get_subclasses - Is no longer used and the class method has been removed. | ||||
| Document._get_subclasses - Is no longer used and the class method has been | ||||
| removed. | ||||
|  | ||||
| Document.objects.with_id - now raises an InvalidQueryError if used with a filter. | ||||
| Document.objects.with_id - now raises an InvalidQueryError if used with a | ||||
| filter. | ||||
|  | ||||
| FutureWarning - A future warning has been added to all inherited classes that | ||||
| don't define `allow_inheritance` in their meta. | ||||
| @@ -37,11 +105,11 @@ human-readable name for the option. | ||||
| PyMongo / MongoDB | ||||
| ----------------- | ||||
|  | ||||
| map reduce now requires pymongo 1.11+- The pymongo merge_output and reduce_output | ||||
| parameters, have been depreciated. | ||||
| map reduce now requires pymongo 1.11+- The pymongo `merge_output` and | ||||
| `reduce_output` parameters have been deprecated. | ||||
|  | ||||
| More methods now use map_reduce as db.eval is not supported for sharding as such | ||||
| the following have been changed: | ||||
| More methods now use map_reduce as db.eval is not supported for sharding as | ||||
| such the following have been changed: | ||||
|  | ||||
|     * :meth:`~mongoengine.queryset.QuerySet.sum` | ||||
|     * :meth:`~mongoengine.queryset.QuerySet.average` | ||||
| @@ -51,8 +119,8 @@ the following have been changed: | ||||
| Default collection naming | ||||
| ------------------------- | ||||
|  | ||||
| Previously it was just lowercase, its now much more pythonic and readable as its | ||||
| lowercase and underscores, previously :: | ||||
| Previously it was just lowercase; it's now much more pythonic and readable as | ||||
| it's lowercase and underscores, previously :: | ||||
|  | ||||
|     class MyAceDocument(Document): | ||||
|         pass | ||||
| @@ -88,7 +156,8 @@ Alternatively, you can rename your collections eg :: | ||||
|  | ||||
|         failure = False | ||||
|  | ||||
|         collection_names = [d._get_collection_name() for d in _document_registry.values()] | ||||
|         collection_names = [d._get_collection_name() | ||||
|                             for d in _document_registry.values()] | ||||
|  | ||||
|         for new_style_name in collection_names: | ||||
|             if not new_style_name:  # embedded documents don't have collections | ||||
| @@ -106,10 +175,17 @@ Alternatively, you can rename your collections eg :: | ||||
|                         old_style_name, new_style_name) | ||||
|                 else: | ||||
|                     db[old_style_name].rename(new_style_name) | ||||
|                     print "Renamed:  %s to %s" % (old_style_name, new_style_name) | ||||
|                     print "Renamed:  %s to %s" % (old_style_name, | ||||
|                                                   new_style_name) | ||||
|  | ||||
|         if failure: | ||||
|             print "Upgrading  collection names failed" | ||||
|         else: | ||||
|             print "Upgraded collection names" | ||||
|  | ||||
|  | ||||
| mongodb 1.8 > 2.0 + | ||||
| =================== | ||||
|  | ||||
| It's been reported that indexes may need to be recreated for the newer index format. | ||||
| To do this drop indexes and call ``ensure_indexes`` on each model. | ||||
|   | ||||
| @@ -12,13 +12,12 @@ from signals import * | ||||
| __all__ = (document.__all__ + fields.__all__ + connection.__all__ + | ||||
|            queryset.__all__ + signals.__all__) | ||||
|  | ||||
| VERSION = (0, 6, 16) | ||||
| VERSION = (0, 7, 10) | ||||
|  | ||||
|  | ||||
| def get_version(): | ||||
|     version = '%s.%s' % (VERSION[0], VERSION[1]) | ||||
|     if VERSION[2]: | ||||
|         version = '%s.%s' % (version, VERSION[2]) | ||||
|     return version | ||||
|     if isinstance(VERSION[-1], basestring): | ||||
|         return '.'.join(map(str, VERSION[:-1])) + VERSION[-1] | ||||
|     return '.'.join(map(str, VERSION)) | ||||
|  | ||||
| __version__ = get_version() | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -31,15 +31,34 @@ class DeReference(object): | ||||
|             items = [i for i in items] | ||||
|  | ||||
|         self.max_depth = max_depth | ||||
|  | ||||
|         doc_type = None | ||||
|         if instance and instance._fields: | ||||
|             doc_type = instance._fields[name].field | ||||
|  | ||||
|         if instance and isinstance(instance, (Document, TopLevelDocumentMetaclass)): | ||||
|             doc_type = instance._fields.get(name) | ||||
|             if hasattr(doc_type, 'field'): | ||||
|                 doc_type = doc_type.field | ||||
|  | ||||
|             if isinstance(doc_type, ReferenceField): | ||||
|                 field = doc_type | ||||
|                 doc_type = doc_type.document_type | ||||
|                 if all([i.__class__ == doc_type for i in items]): | ||||
|                 is_list = not hasattr(items, 'items') | ||||
|  | ||||
|                 if is_list and all([i.__class__ == doc_type for i in items]): | ||||
|                     return items | ||||
|                 elif not is_list and all([i.__class__ == doc_type | ||||
|                                          for i in items.values()]): | ||||
|                     return items | ||||
|                 elif not field.dbref: | ||||
|                     if not hasattr(items, 'items'): | ||||
|                         items = [field.to_python(v) | ||||
|                              if not isinstance(v, (DBRef, Document)) else v | ||||
|                              for v in items] | ||||
|                     else: | ||||
|                         items = dict([ | ||||
|                             (k, field.to_python(v)) | ||||
|                             if not isinstance(v, (DBRef, Document)) else (k, v) | ||||
|                             for k, v in items.iteritems()] | ||||
|                         ) | ||||
|  | ||||
|         self.reference_map = self._find_references(items) | ||||
|         self.object_map = self._fetch_objects(doc_type=doc_type) | ||||
| @@ -65,7 +84,7 @@ class DeReference(object): | ||||
|         # Recursively find dbreferences | ||||
|         depth += 1 | ||||
|         for k, item in iterator: | ||||
|             if hasattr(item, '_fields'): | ||||
|             if isinstance(item, Document): | ||||
|                 for field_name, field in item._fields.iteritems(): | ||||
|                     v = item._data.get(field_name, None) | ||||
|                     if isinstance(v, (DBRef)): | ||||
| @@ -96,7 +115,7 @@ class DeReference(object): | ||||
|         object_map = {} | ||||
|         for col, dbrefs in self.reference_map.iteritems(): | ||||
|             keys = object_map.keys() | ||||
|             refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys])) | ||||
|             refs = list(set([dbref for dbref in dbrefs if unicode(dbref).encode('utf-8') not in keys])) | ||||
|             if hasattr(col, 'objects'):  # We have a document class for the refs | ||||
|                 references = col.objects.in_bulk(refs) | ||||
|                 for key, doc in references.iteritems(): | ||||
| @@ -115,7 +134,7 @@ class DeReference(object): | ||||
|                         elif doc_type is None: | ||||
|                             doc = get_document( | ||||
|                                 ''.join(x.capitalize() | ||||
|                                         for x in col.split('_')))._from_son(ref) | ||||
|                                     for x in col.split('_')))._from_son(ref) | ||||
|                         else: | ||||
|                             doc = doc_type._from_son(ref) | ||||
|                         object_map[doc.id] = doc | ||||
| @@ -147,11 +166,12 @@ class DeReference(object): | ||||
|                 return self.object_map.get(items['_ref'].id, items) | ||||
|             elif '_types' in items and '_cls' in items: | ||||
|                 doc = get_document(items['_cls'])._from_son(items) | ||||
|                 doc._data = self._attach_objects(doc._data, depth, doc, name) | ||||
|                 doc._data = self._attach_objects(doc._data, depth, doc, None) | ||||
|                 return doc | ||||
|  | ||||
|         if not hasattr(items, 'items'): | ||||
|             is_list = True | ||||
|             as_tuple = isinstance(items, tuple) | ||||
|             iterator = enumerate(items) | ||||
|             data = [] | ||||
|         else: | ||||
| @@ -168,7 +188,7 @@ class DeReference(object): | ||||
|  | ||||
|             if k in self.object_map and not is_list: | ||||
|                 data[k] = self.object_map[k] | ||||
|             elif hasattr(v, '_fields'): | ||||
|             elif isinstance(v, Document): | ||||
|                 for field_name, field in v._fields.iteritems(): | ||||
|                     v = data[k]._data.get(field_name, None) | ||||
|                     if isinstance(v, (DBRef)): | ||||
| @@ -186,7 +206,7 @@ class DeReference(object): | ||||
|  | ||||
|         if instance and name: | ||||
|             if is_list: | ||||
|                 return BaseList(data, instance, name) | ||||
|                 return tuple(data) if as_tuple else BaseList(data, instance, name) | ||||
|             return BaseDict(data, instance, name) | ||||
|         depth += 1 | ||||
|         return data | ||||
|   | ||||
| @@ -3,6 +3,8 @@ import datetime | ||||
| from mongoengine import * | ||||
|  | ||||
| from django.utils.encoding import smart_str | ||||
| from django.contrib.auth.models import _user_get_all_permissions | ||||
| from django.contrib.auth.models import _user_has_perm | ||||
| from django.contrib.auth.models import AnonymousUser | ||||
| from django.utils.translation import ugettext_lazy as _ | ||||
|  | ||||
| @@ -104,6 +106,25 @@ class User(Document): | ||||
|         """ | ||||
|         return check_password(raw_password, self.password) | ||||
|  | ||||
|     def get_all_permissions(self, obj=None): | ||||
|         return _user_get_all_permissions(self, obj) | ||||
|  | ||||
|     def has_perm(self, perm, obj=None): | ||||
|         """ | ||||
|         Returns True if the user has the specified permission. This method | ||||
|         queries all available auth backends, but returns immediately if any | ||||
|         backend returns True. Thus, a user who has permission from a single | ||||
|         auth backend is assumed to have permission in general. If an object is | ||||
|         provided, permissions for this specific object are checked. | ||||
|         """ | ||||
|  | ||||
|         # Active superusers have all permissions. | ||||
|         if self.is_active and self.is_superuser: | ||||
|             return True | ||||
|  | ||||
|         # Otherwise we need to check the backends. | ||||
|         return _user_has_perm(self, perm, obj) | ||||
|  | ||||
|     @classmethod | ||||
|     def create_user(cls, username, password, email=None): | ||||
|         """Create (and save) a new user with the given username, password and | ||||
|   | ||||
| @@ -15,15 +15,33 @@ MONGOENGINE_SESSION_DB_ALIAS = getattr( | ||||
|     settings, 'MONGOENGINE_SESSION_DB_ALIAS', | ||||
|     DEFAULT_CONNECTION_NAME) | ||||
|  | ||||
| # a setting for the name of the collection used to store sessions | ||||
| MONGOENGINE_SESSION_COLLECTION = getattr( | ||||
|     settings, 'MONGOENGINE_SESSION_COLLECTION', | ||||
|     'django_session') | ||||
|  | ||||
| # a setting for whether session data is stored encoded or not | ||||
| MONGOENGINE_SESSION_DATA_ENCODE = getattr( | ||||
|     settings, 'MONGOENGINE_SESSION_DATA_ENCODE', | ||||
|     True) | ||||
|  | ||||
| class MongoSession(Document): | ||||
|     session_key = fields.StringField(primary_key=True, max_length=40) | ||||
|     session_data = fields.StringField() | ||||
|     session_data = fields.StringField() if MONGOENGINE_SESSION_DATA_ENCODE \ | ||||
|                                         else fields.DictField() | ||||
|     expire_date = fields.DateTimeField() | ||||
|  | ||||
|     meta = {'collection': 'django_session', | ||||
|             'db_alias': MONGOENGINE_SESSION_DB_ALIAS, | ||||
|             'allow_inheritance': False} | ||||
|     meta = { | ||||
|         'collection': MONGOENGINE_SESSION_COLLECTION, | ||||
|         'db_alias': MONGOENGINE_SESSION_DB_ALIAS, | ||||
|         'allow_inheritance': False, | ||||
|         'indexes': [ | ||||
|             { | ||||
|                 'fields': ['expire_date'], | ||||
|                 'expireAfterSeconds': settings.SESSION_COOKIE_AGE | ||||
|             } | ||||
|         ] | ||||
|     } | ||||
|  | ||||
|  | ||||
| class SessionStore(SessionBase): | ||||
| @@ -34,7 +52,10 @@ class SessionStore(SessionBase): | ||||
|         try: | ||||
|             s = MongoSession.objects(session_key=self.session_key, | ||||
|                                      expire_date__gt=datetime.now())[0] | ||||
|             return self.decode(force_unicode(s.session_data)) | ||||
|             if MONGOENGINE_SESSION_DATA_ENCODE: | ||||
|                 return self.decode(force_unicode(s.session_data)) | ||||
|             else: | ||||
|                 return s.session_data | ||||
|         except (IndexError, SuspiciousOperation): | ||||
|             self.create() | ||||
|             return {} | ||||
| @@ -57,7 +78,10 @@ class SessionStore(SessionBase): | ||||
|         if self.session_key is None: | ||||
|             self._session_key = self._get_new_session_key() | ||||
|         s = MongoSession(session_key=self.session_key) | ||||
|         s.session_data = self.encode(self._get_session(no_load=must_create)) | ||||
|         if MONGOENGINE_SESSION_DATA_ENCODE: | ||||
|             s.session_data = self.encode(self._get_session(no_load=must_create)) | ||||
|         else: | ||||
|             s.session_data = self._get_session(no_load=must_create) | ||||
|         s.expire_date = self.get_expiry_date() | ||||
|         try: | ||||
|             s.save(force_insert=must_create, safe=True) | ||||
|   | ||||
| @@ -1,4 +1,3 @@ | ||||
| from django.http import Http404 | ||||
| from mongoengine.queryset import QuerySet | ||||
| from mongoengine.base import BaseDocument | ||||
| from mongoengine.base import ValidationError | ||||
| @@ -27,6 +26,7 @@ def get_document_or_404(cls, *args, **kwargs): | ||||
|     try: | ||||
|         return queryset.get(*args, **kwargs) | ||||
|     except (queryset._document.DoesNotExist, ValidationError): | ||||
|         from django.http import Http404 | ||||
|         raise Http404('No %s matches the given query.' % queryset._document._class_name) | ||||
|  | ||||
| def get_list_or_404(cls, *args, **kwargs): | ||||
| @@ -42,5 +42,6 @@ def get_list_or_404(cls, *args, **kwargs): | ||||
|     queryset = _get_queryset(cls) | ||||
|     obj_list = list(queryset.filter(*args, **kwargs)) | ||||
|     if not obj_list: | ||||
|         from django.http import Http404 | ||||
|         raise Http404('No %s matches the given query.' % queryset._document._class_name) | ||||
|     return obj_list | ||||
|   | ||||
| @@ -1,10 +1,28 @@ | ||||
| #coding: utf-8 | ||||
| from django.test import TestCase | ||||
| from django.conf import settings | ||||
| from nose.plugins.skip import SkipTest | ||||
|  | ||||
| from mongoengine.python_support import PY3 | ||||
| from mongoengine import connect | ||||
|  | ||||
| try: | ||||
|     from django.test import TestCase | ||||
|     from django.conf import settings | ||||
| except Exception as err: | ||||
|     if PY3: | ||||
|         from unittest import TestCase | ||||
|         # Dummy value so no error | ||||
|         class settings: | ||||
|             MONGO_DATABASE_NAME = 'dummy' | ||||
|     else: | ||||
|         raise err | ||||
|  | ||||
|  | ||||
| class MongoTestCase(TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         if PY3: | ||||
|             raise SkipTest('django does not have Python 3 support') | ||||
|  | ||||
|     """ | ||||
|     TestCase class that clear the collection between the tests | ||||
|     """ | ||||
|   | ||||
| @@ -1,15 +1,19 @@ | ||||
| import warnings | ||||
|  | ||||
| import pymongo | ||||
| import re | ||||
|  | ||||
| from bson.dbref import DBRef | ||||
| from mongoengine import signals, queryset | ||||
|  | ||||
| from mongoengine import signals | ||||
| from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, | ||||
|                   BaseDict, BaseList) | ||||
| from queryset import OperationError | ||||
| from queryset import OperationError, NotUniqueError | ||||
| from connection import get_db, DEFAULT_CONNECTION_NAME | ||||
|  | ||||
| __all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument', | ||||
|            'DynamicEmbeddedDocument', 'OperationError', 'InvalidCollectionError'] | ||||
|            'DynamicEmbeddedDocument', 'OperationError', | ||||
|            'InvalidCollectionError', 'NotUniqueError'] | ||||
|  | ||||
|  | ||||
| class InvalidCollectionError(Exception): | ||||
| @@ -21,8 +25,19 @@ class EmbeddedDocument(BaseDocument): | ||||
|     collection.  :class:`~mongoengine.EmbeddedDocument`\ s should be used as | ||||
|     fields on :class:`~mongoengine.Document`\ s through the | ||||
|     :class:`~mongoengine.EmbeddedDocumentField` field type. | ||||
|  | ||||
|     A :class:`~mongoengine.EmbeddedDocument` subclass may be itself subclassed, | ||||
|     to create a specialised version of the embedded document that will be | ||||
|     stored in the same collection. To facilitate this behaviour, `_cls` and | ||||
|     `_types` fields are added to documents (hidden though the MongoEngine | ||||
|     interface though). To disable this behaviour and remove the dependence on | ||||
|     the presence of `_cls` and `_types`, set :attr:`allow_inheritance` to | ||||
|     ``False`` in the :attr:`meta` dictionary. | ||||
|     """ | ||||
|  | ||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||
|     my_metaclass  = DocumentMetaclass | ||||
|     __metaclass__ = DocumentMetaclass | ||||
|  | ||||
|     def __init__(self, *args, **kwargs): | ||||
| @@ -91,9 +106,12 @@ class Document(BaseDocument): | ||||
|     disabled by either setting types to False on the specific index or | ||||
|     by setting index_types to False on the meta dictionary for the document. | ||||
|     """ | ||||
|  | ||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||
|     my_metaclass  = TopLevelDocumentMetaclass | ||||
|     __metaclass__ = TopLevelDocumentMetaclass | ||||
|  | ||||
|     @apply | ||||
|     def pk(): | ||||
|         """Primary key alias | ||||
|         """ | ||||
| @@ -102,6 +120,7 @@ class Document(BaseDocument): | ||||
|         def fset(self, value): | ||||
|             return setattr(self, self._meta['id_field'], value) | ||||
|         return property(fget, fset) | ||||
|     pk = pk() | ||||
|  | ||||
|     @classmethod | ||||
|     def _get_db(cls): | ||||
| @@ -127,8 +146,9 @@ class Document(BaseDocument): | ||||
|                     options = cls._collection.options() | ||||
|                     if options.get('max') != max_documents or \ | ||||
|                        options.get('size') != max_size: | ||||
|                         msg = ('Cannot create collection "%s" as a capped ' | ||||
|                                'collection as it already exists') % cls._collection | ||||
|                         msg = (('Cannot create collection "%s" as a capped ' | ||||
|                                'collection as it already exists') | ||||
|                                 % cls._collection) | ||||
|                         raise InvalidCollectionError(msg) | ||||
|                 else: | ||||
|                     # Create the collection as a capped collection | ||||
| @@ -142,8 +162,9 @@ class Document(BaseDocument): | ||||
|                 cls._collection = db[collection_name] | ||||
|         return cls._collection | ||||
|  | ||||
|     def save(self, safe=True, force_insert=False, validate=True, write_options=None, | ||||
|             cascade=None, cascade_kwargs=None, _refs=None): | ||||
|     def save(self, safe=True, force_insert=False, validate=True, | ||||
|              write_options=None,  cascade=None, cascade_kwargs=None, | ||||
|              _refs=None, **kwargs): | ||||
|         """Save the :class:`~mongoengine.Document` to the database. If the | ||||
|         document already exists, it will be updated, otherwise it will be | ||||
|         created. | ||||
| @@ -156,27 +177,30 @@ class Document(BaseDocument): | ||||
|             updates of existing documents | ||||
|         :param validate: validates the document; set to ``False`` to skip. | ||||
|         :param write_options: Extra keyword arguments are passed down to | ||||
|                 :meth:`~pymongo.collection.Collection.save` OR | ||||
|                 :meth:`~pymongo.collection.Collection.insert` | ||||
|                 which will be used as options for the resultant ``getLastError`` command. | ||||
|                 For example, ``save(..., write_options={w: 2, fsync: True}, ...)`` will | ||||
|                 wait until at least two servers have recorded the write and will force an | ||||
|                 fsync on each server being written to. | ||||
|         :param cascade: Sets the flag for cascading saves.  You can set a default by setting | ||||
|             "cascade" in the document __meta__ | ||||
|         :param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves | ||||
|             :meth:`~pymongo.collection.Collection.save` OR | ||||
|             :meth:`~pymongo.collection.Collection.insert` | ||||
|             which will be used as options for the resultant | ||||
|             ``getLastError`` command.  For example, | ||||
|             ``save(..., write_options={w: 2, fsync: True}, ...)`` will | ||||
|             wait until at least two servers have recorded the write and | ||||
|             will force an fsync on the primary server. | ||||
|         :param cascade: Sets the flag for cascading saves.  You can set a | ||||
|             default by setting "cascade" in the document __meta__ | ||||
|         :param cascade_kwargs: optional kwargs dictionary to be passed throw | ||||
|             to cascading saves | ||||
|         :param _refs: A list of processed references used in cascading saves | ||||
|  | ||||
|         .. versionchanged:: 0.5 | ||||
|             In existing documents it only saves changed fields using set / unset | ||||
|             Saves are cascaded and any :class:`~bson.dbref.DBRef` objects | ||||
|             that have changes are saved as well. | ||||
|             In existing documents it only saves changed fields using | ||||
|             set / unset.  Saves are cascaded and any | ||||
|             :class:`~bson.dbref.DBRef` objects that have changes are | ||||
|             saved as well. | ||||
|         .. versionchanged:: 0.6 | ||||
|             Cascade saves are optional = defaults to True, if you want fine grain | ||||
|             control then you can turn off using document meta['cascade'] = False | ||||
|             Also you can pass different kwargs to the cascade save using cascade_kwargs | ||||
|             which overwrites the existing kwargs with custom values | ||||
|  | ||||
|             Cascade saves are optional = defaults to True, if you want | ||||
|             fine grain control then you can turn off using document | ||||
|             meta['cascade'] = False  Also you can pass different kwargs to | ||||
|             the cascade save using cascade_kwargs which overwrites the | ||||
|             existing kwargs with custom values | ||||
|         """ | ||||
|         signals.pre_save.send(self.__class__, document=self) | ||||
|  | ||||
| @@ -194,13 +218,14 @@ class Document(BaseDocument): | ||||
|             collection = self.__class__.objects._collection | ||||
|             if created: | ||||
|                 if force_insert: | ||||
|                     object_id = collection.insert(doc, safe=safe, **write_options) | ||||
|                     object_id = collection.insert(doc, safe=safe, | ||||
|                                                   **write_options) | ||||
|                 else: | ||||
|                     object_id = collection.save(doc, safe=safe, **write_options) | ||||
|                     object_id = collection.save(doc, safe=safe, | ||||
|                                                 **write_options) | ||||
|             else: | ||||
|                 object_id = doc['_id'] | ||||
|                 updates, removals = self._delta() | ||||
|  | ||||
|                 # Need to add shard key to query, or you get an error | ||||
|                 select_dict = {'_id': object_id} | ||||
|                 shard_key = self.__class__._meta.get('shard_key', tuple()) | ||||
| @@ -210,11 +235,15 @@ class Document(BaseDocument): | ||||
|  | ||||
|                 upsert = self._created | ||||
|                 if updates: | ||||
|                     collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options) | ||||
|                     collection.update(select_dict, {"$set": updates}, | ||||
|                         upsert=upsert, safe=safe, **write_options) | ||||
|                 if removals: | ||||
|                     collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options) | ||||
|                     collection.update(select_dict, {"$unset": removals}, | ||||
|                         upsert=upsert, safe=safe, **write_options) | ||||
|  | ||||
|             cascade = self._meta.get('cascade', True) if cascade is None else cascade | ||||
|             warn_cascade = not cascade and 'cascade' not in self._meta | ||||
|             cascade = (self._meta.get('cascade', True) | ||||
|                        if cascade is None else cascade) | ||||
|             if cascade: | ||||
|                 kwargs = { | ||||
|                     "safe": safe, | ||||
| @@ -226,45 +255,64 @@ class Document(BaseDocument): | ||||
|                 if cascade_kwargs:  # Allow granular control over cascades | ||||
|                     kwargs.update(cascade_kwargs) | ||||
|                 kwargs['_refs'] = _refs | ||||
|                 #self._changed_fields = [] | ||||
|                 self.cascade_save(**kwargs) | ||||
|                 self.cascade_save(warn_cascade=warn_cascade, **kwargs) | ||||
|  | ||||
|         except pymongo.errors.OperationFailure, err: | ||||
|             message = 'Could not save document (%s)' | ||||
|             if u'duplicate key' in unicode(err): | ||||
|             if re.match('^E1100[01] duplicate key', unicode(err)): | ||||
|                 # E11000 - duplicate key error index | ||||
|                 # E11001 - duplicate key on update | ||||
|                 message = u'Tried to save duplicate unique keys (%s)' | ||||
|                 raise NotUniqueError(message % unicode(err)) | ||||
|             raise OperationError(message % unicode(err)) | ||||
|         id_field = self._meta['id_field'] | ||||
|         self[id_field] = self._fields[id_field].to_python(object_id) | ||||
|         if id_field not in self._meta.get('shard_key', []): | ||||
|             self[id_field] = self._fields[id_field].to_python(object_id) | ||||
|  | ||||
|         self._changed_fields = [] | ||||
|         self._clear_changed_fields() | ||||
|         self._created = False | ||||
|         signals.post_save.send(self.__class__, document=self, created=created) | ||||
|         return self | ||||
|  | ||||
|     def cascade_save(self, *args, **kwargs): | ||||
|         """Recursively saves any references / generic references on an object""" | ||||
|         from fields import ReferenceField, GenericReferenceField | ||||
|     def cascade_save(self, warn_cascade=None, *args, **kwargs): | ||||
|         """Recursively saves any references / | ||||
|            generic references on an objects""" | ||||
|         import fields | ||||
|         _refs = kwargs.get('_refs', []) or [] | ||||
|  | ||||
|         for name, cls in self._fields.items(): | ||||
|  | ||||
|             if not isinstance(cls, (ReferenceField, GenericReferenceField)): | ||||
|             if not isinstance(cls, (fields.ReferenceField, | ||||
|                                     fields.GenericReferenceField)): | ||||
|                 continue | ||||
|  | ||||
|             ref = getattr(self, name) | ||||
|             if not ref: | ||||
|             if not ref or isinstance(ref, DBRef): | ||||
|                 continue | ||||
|             if isinstance(ref, DBRef): | ||||
|  | ||||
|             if not getattr(ref, '_changed_fields', True): | ||||
|                 continue | ||||
|  | ||||
|             ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) | ||||
|             if ref and ref_id not in _refs: | ||||
|                 if warn_cascade: | ||||
|                     msg = ("Cascading saves will default to off in 0.8, " | ||||
|                           "please  explicitly set `.save(cascade=True)`") | ||||
|                     warnings.warn(msg, FutureWarning) | ||||
|                 _refs.append(ref_id) | ||||
|                 kwargs["_refs"] = _refs | ||||
|                 ref.save(**kwargs) | ||||
|                 ref._changed_fields = [] | ||||
|  | ||||
|     @property | ||||
|     def _object_key(self): | ||||
|         """Dict to identify object in collection | ||||
|         """ | ||||
|         select_dict = {'pk': self.pk} | ||||
|         shard_key = self.__class__._meta.get('shard_key', tuple()) | ||||
|         for k in shard_key: | ||||
|             select_dict[k] = getattr(self, k) | ||||
|         return select_dict | ||||
|  | ||||
|     def update(self, **kwargs): | ||||
|         """Performs an update on the :class:`~mongoengine.Document` | ||||
|         A convenience wrapper to :meth:`~mongoengine.QuerySet.update`. | ||||
| @@ -276,11 +324,7 @@ class Document(BaseDocument): | ||||
|             raise OperationError('attempt to update a document not yet saved') | ||||
|  | ||||
|         # Need to add shard key to query, or you get an error | ||||
|         select_dict = {'pk': self.pk} | ||||
|         shard_key = self.__class__._meta.get('shard_key', tuple()) | ||||
|         for k in shard_key: | ||||
|             select_dict[k] = getattr(self, k) | ||||
|         return self.__class__.objects(**select_dict).update_one(**kwargs) | ||||
|         return self.__class__.objects(**self._object_key).update_one(**kwargs) | ||||
|  | ||||
|     def delete(self, safe=False): | ||||
|         """Delete the :class:`~mongoengine.Document` from the database. This | ||||
| @@ -291,7 +335,7 @@ class Document(BaseDocument): | ||||
|         signals.pre_delete.send(self.__class__, document=self) | ||||
|  | ||||
|         try: | ||||
|             self.__class__.objects(pk=self.pk).delete(safe=safe) | ||||
|             self.__class__.objects(**self._object_key).delete(safe=safe) | ||||
|         except pymongo.errors.OperationFailure, err: | ||||
|             message = u'Could not delete document (%s)' % err.message | ||||
|             raise OperationError(message) | ||||
| @@ -304,8 +348,8 @@ class Document(BaseDocument): | ||||
|  | ||||
|         .. versionadded:: 0.5 | ||||
|         """ | ||||
|         from dereference import DeReference | ||||
|         self._data = DeReference()(self._data, max_depth) | ||||
|         import dereference | ||||
|         self._data = dereference.DeReference()(self._data, max_depth) | ||||
|         return self | ||||
|  | ||||
|     def reload(self, max_depth=1): | ||||
| @@ -317,7 +361,12 @@ class Document(BaseDocument): | ||||
|         id_field = self._meta['id_field'] | ||||
|         obj = self.__class__.objects( | ||||
|                 **{id_field: self[id_field]} | ||||
|               ).first().select_related(max_depth=max_depth) | ||||
|               ).limit(1).select_related(max_depth=max_depth) | ||||
|         if obj: | ||||
|             obj = obj[0] | ||||
|         else: | ||||
|             msg = "Reloaded document has been deleted" | ||||
|             raise OperationError(msg) | ||||
|         for field in self._fields: | ||||
|             setattr(self, field, self._reload(field, obj[field])) | ||||
|         if self._dynamic: | ||||
| @@ -353,17 +402,18 @@ class Document(BaseDocument): | ||||
|         """This method registers the delete rules to apply when removing this | ||||
|         object. | ||||
|         """ | ||||
|         cls._meta['delete_rules'][(document_cls, field_name)] = rule | ||||
|         delete_rules = cls._meta.get('delete_rules') or {} | ||||
|         delete_rules[(document_cls, field_name)] = rule | ||||
|         cls._meta['delete_rules'] = delete_rules | ||||
|  | ||||
|     @classmethod | ||||
|     def drop_collection(cls): | ||||
|         """Drops the entire collection associated with this | ||||
|         :class:`~mongoengine.Document` type from the database. | ||||
|         """ | ||||
|         from mongoengine.queryset import QuerySet | ||||
|         db = cls._get_db() | ||||
|         db.drop_collection(cls._get_collection_name()) | ||||
|         QuerySet._reset_already_indexed(cls) | ||||
|         queryset.QuerySet._reset_already_indexed(cls) | ||||
|  | ||||
|  | ||||
| class DynamicDocument(Document): | ||||
| @@ -375,11 +425,16 @@ class DynamicDocument(Document): | ||||
|     :class:`~mongoengine.DynamicField` and data can be attributed to that | ||||
|     field. | ||||
|  | ||||
|     ..note:: | ||||
|     .. note:: | ||||
|  | ||||
|         There is one caveat on Dynamic Documents: fields cannot start with `_` | ||||
|     """ | ||||
|  | ||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||
|     my_metaclass  = TopLevelDocumentMetaclass | ||||
|     __metaclass__ = TopLevelDocumentMetaclass | ||||
|  | ||||
|     _dynamic = True | ||||
|  | ||||
|     def __delattr__(self, *args, **kwargs): | ||||
| @@ -398,7 +453,11 @@ class DynamicEmbeddedDocument(EmbeddedDocument): | ||||
|     information about dynamic documents. | ||||
|     """ | ||||
|  | ||||
|     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||
|     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||
|     my_metaclass  = DocumentMetaclass | ||||
|     __metaclass__ = DocumentMetaclass | ||||
|  | ||||
|     _dynamic = True | ||||
|  | ||||
|     def __delattr__(self, *args, **kwargs): | ||||
|   | ||||
| @@ -1,18 +1,24 @@ | ||||
| import datetime | ||||
| import time | ||||
| import decimal | ||||
| import gridfs | ||||
| import itertools | ||||
| import re | ||||
| import time | ||||
| import urllib2 | ||||
| import urlparse | ||||
| import uuid | ||||
| import warnings | ||||
| from operator import itemgetter | ||||
|  | ||||
| import gridfs | ||||
| from bson import Binary, DBRef, SON, ObjectId | ||||
|  | ||||
| from mongoengine.python_support import (PY3, bin_type, txt_type, | ||||
|                                         str_types, StringIO) | ||||
| from base import (BaseField, ComplexBaseField, ObjectIdField, | ||||
|                   ValidationError, get_document, BaseDocument) | ||||
| from queryset import DO_NOTHING, QuerySet | ||||
| from document import Document, EmbeddedDocument | ||||
| from connection import get_db, DEFAULT_CONNECTION_NAME | ||||
| from operator import itemgetter | ||||
|  | ||||
|  | ||||
| try: | ||||
| @@ -21,13 +27,7 @@ except ImportError: | ||||
|     Image = None | ||||
|     ImageOps = None | ||||
|  | ||||
| try: | ||||
|     from cStringIO import StringIO | ||||
| except ImportError: | ||||
|     from StringIO import StringIO | ||||
|  | ||||
|  | ||||
| __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', | ||||
| __all__ = ['StringField', 'IntField', 'LongField', 'FloatField', 'BooleanField', | ||||
|            'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField', | ||||
|            'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', | ||||
|            'DecimalField', 'ComplexDateTimeField', 'URLField', 'DynamicField', | ||||
| @@ -51,8 +51,11 @@ class StringField(BaseField): | ||||
|     def to_python(self, value): | ||||
|         if isinstance(value, unicode): | ||||
|             return value | ||||
|         else: | ||||
|             return value.decode('utf-8') | ||||
|         try: | ||||
|             value = value.decode('utf-8') | ||||
|         except: | ||||
|             pass | ||||
|         return value | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, basestring): | ||||
| @@ -100,25 +103,30 @@ class URLField(StringField): | ||||
|     .. versionadded:: 0.3 | ||||
|     """ | ||||
|  | ||||
|     URL_REGEX = re.compile( | ||||
|         r'^https?://' | ||||
|         r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|' | ||||
|         r'localhost|' | ||||
|         r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' | ||||
|         r'(?::\d+)?' | ||||
|         r'(?:/?|[/?]\S+)$', re.IGNORECASE | ||||
|     ) | ||||
|     _URL_REGEX = re.compile( | ||||
|         r'^(?:http|ftp)s?://' # http:// or https:// | ||||
|         r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain... | ||||
|         r'localhost|' #localhost... | ||||
|         r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip | ||||
|         r'(?::\d+)?' # optional port | ||||
|         r'(?:/?|[/?]\S+)$', re.IGNORECASE) | ||||
|  | ||||
|     def __init__(self, verify_exists=False, **kwargs): | ||||
|     def __init__(self, verify_exists=False, url_regex=None, **kwargs): | ||||
|         self.verify_exists = verify_exists | ||||
|         self.url_regex = url_regex or self._URL_REGEX | ||||
|         super(URLField, self).__init__(**kwargs) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not URLField.URL_REGEX.match(value): | ||||
|         if not self.url_regex.match(value): | ||||
|             self.error('Invalid URL: %s' % value) | ||||
|             return | ||||
|  | ||||
|         if self.verify_exists: | ||||
|             import urllib2 | ||||
|             warnings.warn( | ||||
|                 "The URLField verify_exists argument has intractable security " | ||||
|                 "and performance issues. Accordingly, it has been deprecated.", | ||||
|             DeprecationWarning | ||||
|             ) | ||||
|             try: | ||||
|                 request = urllib2.Request(value) | ||||
|                 urllib2.urlopen(request) | ||||
| @@ -135,16 +143,17 @@ class EmailField(StringField): | ||||
|     EMAIL_REGEX = re.compile( | ||||
|         r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*"  # dot-atom | ||||
|         r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016-\177])*"'  # quoted-string | ||||
|         r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE  # domain | ||||
|         r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,253}[A-Z0-9])?\.)+[A-Z]{2,6}\.?$', re.IGNORECASE  # domain | ||||
|     ) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not EmailField.EMAIL_REGEX.match(value): | ||||
|             self.error('Invalid Mail-address: %s' % value) | ||||
|         super(EmailField, self).validate(value) | ||||
|  | ||||
|  | ||||
| class IntField(BaseField): | ||||
|     """An integer field. | ||||
|     """An 32-bit integer field. | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, min_value=None, max_value=None, **kwargs): | ||||
| @@ -152,7 +161,11 @@ class IntField(BaseField): | ||||
|         super(IntField, self).__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         return int(value) | ||||
|         try: | ||||
|             value = int(value) | ||||
|         except ValueError: | ||||
|             pass | ||||
|         return value | ||||
|  | ||||
|     def validate(self, value): | ||||
|         try: | ||||
| @@ -169,10 +182,44 @@ class IntField(BaseField): | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return value | ||||
|          | ||||
|  | ||||
|         return int(value) | ||||
|  | ||||
|  | ||||
| class LongField(BaseField): | ||||
|     """An 64-bit integer field. | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, min_value=None, max_value=None, **kwargs): | ||||
|         self.min_value, self.max_value = min_value, max_value | ||||
|         super(LongField, self).__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         try: | ||||
|             value = long(value) | ||||
|         except ValueError: | ||||
|             pass | ||||
|         return value | ||||
|  | ||||
|     def validate(self, value): | ||||
|         try: | ||||
|             value = long(value) | ||||
|         except: | ||||
|             self.error('%s could not be converted to long' % value) | ||||
|  | ||||
|         if self.min_value is not None and value < self.min_value: | ||||
|             self.error('Long value is too small') | ||||
|  | ||||
|         if self.max_value is not None and value > self.max_value: | ||||
|             self.error('Long value is too large') | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return value | ||||
|  | ||||
|         return long(value) | ||||
|  | ||||
|  | ||||
| class FloatField(BaseField): | ||||
|     """An floating point number field. | ||||
|     """ | ||||
| @@ -182,7 +229,11 @@ class FloatField(BaseField): | ||||
|         super(FloatField, self).__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         return float(value) | ||||
|         try: | ||||
|             value = float(value) | ||||
|         except ValueError: | ||||
|             pass | ||||
|         return value | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if isinstance(value, int): | ||||
| @@ -199,7 +250,7 @@ class FloatField(BaseField): | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return value | ||||
|          | ||||
|  | ||||
|         return float(value) | ||||
|  | ||||
|  | ||||
| @@ -214,9 +265,14 @@ class DecimalField(BaseField): | ||||
|         super(DecimalField, self).__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         original_value = value | ||||
|         if not isinstance(value, basestring): | ||||
|             value = unicode(value) | ||||
|         return decimal.Decimal(value) | ||||
|         try: | ||||
|             value = decimal.Decimal(value) | ||||
|         except ValueError: | ||||
|             return original_value | ||||
|         return value | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         return unicode(value) | ||||
| @@ -244,7 +300,11 @@ class BooleanField(BaseField): | ||||
|     """ | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         return bool(value) | ||||
|         try: | ||||
|             value = bool(value) | ||||
|         except ValueError: | ||||
|             pass | ||||
|         return value | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, bool): | ||||
| @@ -375,6 +435,8 @@ class ComplexDateTimeField(StringField): | ||||
|         data = super(ComplexDateTimeField, self).__get__(instance, owner) | ||||
|         if data == None: | ||||
|             return datetime.datetime.now() | ||||
|         if isinstance(data, datetime.datetime): | ||||
|             return data | ||||
|         return self._convert_from_string(data) | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
| @@ -387,7 +449,11 @@ class ComplexDateTimeField(StringField): | ||||
|                        'ComplexDateTimeField') | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         return self._convert_from_string(value) | ||||
|         original_value = value | ||||
|         try: | ||||
|             return self._convert_from_string(value) | ||||
|         except: | ||||
|             return original_value | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         return self._convert_from_datetime(value) | ||||
| @@ -451,8 +517,9 @@ class GenericEmbeddedDocumentField(BaseField): | ||||
|  | ||||
|     Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`. | ||||
|  | ||||
|     ..note :: You can use the choices param to limit the acceptable | ||||
|     EmbeddedDocument types | ||||
|     .. note :: | ||||
|         You can use the choices param to limit the acceptable | ||||
|         EmbeddedDocument types | ||||
|     """ | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
| @@ -483,7 +550,7 @@ class GenericEmbeddedDocumentField(BaseField): | ||||
|  | ||||
|  | ||||
| class DynamicField(BaseField): | ||||
|     """A tryly dynamic field type capable of handling different and varying | ||||
|     """A truly dynamic field type capable of handling different and varying | ||||
|     types of data. | ||||
|  | ||||
|     Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data""" | ||||
| @@ -530,6 +597,8 @@ class ListField(ComplexBaseField): | ||||
|     """A list field that wraps a standard field, allowing multiple instances | ||||
|     of the field to be used as a list in the database. | ||||
|  | ||||
|     If using with ReferenceFields see: :ref:`one-to-many-with-listfields` | ||||
|  | ||||
|     .. note:: | ||||
|         Required means it cannot be empty - as the default for ListFields is [] | ||||
|     """ | ||||
| @@ -668,7 +737,8 @@ class ReferenceField(BaseField): | ||||
|       * NULLIFY     - Updates the reference to null. | ||||
|       * CASCADE     - Deletes the documents associated with the reference. | ||||
|       * DENY        - Prevent the deletion of the reference object. | ||||
|       * PULL        - Pull the reference from a :class:`~mongoengine.ListField` of references | ||||
|       * PULL        - Pull the reference from a :class:`~mongoengine.ListField` | ||||
|                       of references | ||||
|  | ||||
|     Alternative syntax for registering delete rules (useful when implementing | ||||
|     bi-directional delete rules) | ||||
| @@ -681,12 +751,19 @@ class ReferenceField(BaseField): | ||||
|  | ||||
|         Bar.register_delete_rule(Foo, 'bar', NULLIFY) | ||||
|  | ||||
|     .. note :: | ||||
|         `reverse_delete_rules` do not trigger pre / post delete signals to be | ||||
|         triggered. | ||||
|  | ||||
|     .. versionchanged:: 0.5 added `reverse_delete_rule` | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, document_type, reverse_delete_rule=DO_NOTHING, **kwargs): | ||||
|     def __init__(self, document_type, dbref=None, | ||||
|                  reverse_delete_rule=DO_NOTHING, **kwargs): | ||||
|         """Initialises the Reference Field. | ||||
|  | ||||
|         :param dbref:  Store the reference as :class:`~pymongo.dbref.DBRef` | ||||
|           or as the :class:`~pymongo.objectid.ObjectId`.id . | ||||
|         :param reverse_delete_rule: Determines what to do when the referring | ||||
|           object is deleted | ||||
|         """ | ||||
| @@ -694,6 +771,13 @@ class ReferenceField(BaseField): | ||||
|             if not issubclass(document_type, (Document, basestring)): | ||||
|                 self.error('Argument to ReferenceField constructor must be a ' | ||||
|                            'document class or a string') | ||||
|  | ||||
|         if dbref is None: | ||||
|             msg = ("ReferenceFields will default to using ObjectId " | ||||
|                    " strings in 0.8, set DBRef=True if this isn't desired") | ||||
|             warnings.warn(msg, FutureWarning) | ||||
|  | ||||
|         self.dbref = dbref if dbref is not None else True  # To change in 0.8 | ||||
|         self.document_type_obj = document_type | ||||
|         self.reverse_delete_rule = reverse_delete_rule | ||||
|         super(ReferenceField, self).__init__(**kwargs) | ||||
| @@ -716,8 +800,9 @@ class ReferenceField(BaseField): | ||||
|  | ||||
|         # Get value from document instance if available | ||||
|         value = instance._data.get(self.name) | ||||
|  | ||||
|         # Dereference DBRefs | ||||
|         if isinstance(value, (DBRef)): | ||||
|         if isinstance(value, DBRef): | ||||
|             value = self.document_type._get_db().dereference(value) | ||||
|             if value is not None: | ||||
|                 instance._data[self.name] = self.document_type._from_son(value) | ||||
| @@ -726,6 +811,10 @@ class ReferenceField(BaseField): | ||||
|  | ||||
|     def to_mongo(self, document): | ||||
|         if isinstance(document, DBRef): | ||||
|             if not self.dbref: | ||||
|                 return document.id | ||||
|             return document | ||||
|         elif not self.dbref and isinstance(document, basestring): | ||||
|             return document | ||||
|  | ||||
|         id_field_name = self.document_type._meta['id_field'] | ||||
| @@ -733,7 +822,7 @@ class ReferenceField(BaseField): | ||||
|  | ||||
|         if isinstance(document, Document): | ||||
|             # We need the id from the saved object to create the DBRef | ||||
|             id_ = document.id | ||||
|             id_ = document.pk | ||||
|             if id_ is None: | ||||
|                 self.error('You can only reference documents once they have' | ||||
|                            ' been saved to the database') | ||||
| @@ -741,18 +830,30 @@ class ReferenceField(BaseField): | ||||
|             id_ = document | ||||
|  | ||||
|         id_ = id_field.to_mongo(id_) | ||||
|         collection = self.document_type._get_collection_name() | ||||
|         return DBRef(collection, id_) | ||||
|         if self.dbref: | ||||
|             collection = self.document_type._get_collection_name() | ||||
|             return DBRef(collection, id_) | ||||
|  | ||||
|         return id_ | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         """Convert a MongoDB-compatible type to a Python type. | ||||
|         """ | ||||
|         if (not self.dbref and | ||||
|             not isinstance(value, (DBRef, Document, EmbeddedDocument))): | ||||
|             collection = self.document_type._get_collection_name() | ||||
|             value = DBRef(collection, self.document_type.id.to_python(value)) | ||||
|         return value | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return None | ||||
|  | ||||
|         return self.to_mongo(value) | ||||
|  | ||||
|     def validate(self, value): | ||||
|  | ||||
|         if not isinstance(value, (self.document_type, DBRef)): | ||||
|             self.error('A ReferenceField only accepts DBRef') | ||||
|             self.error("A ReferenceField only accepts DBRef or documents") | ||||
|  | ||||
|         if isinstance(value, Document) and value.id is None: | ||||
|             self.error('You can only reference documents once they have been ' | ||||
| @@ -766,10 +867,12 @@ class GenericReferenceField(BaseField): | ||||
|     """A reference to *any* :class:`~mongoengine.document.Document` subclass | ||||
|     that will be automatically dereferenced on access (lazily). | ||||
|  | ||||
|     ..note ::  Any documents used as a generic reference must be registered in the | ||||
|     document registry.  Importing the model will automatically register it. | ||||
|     .. note :: | ||||
|         * Any documents used as a generic reference must be registered in the | ||||
|           document registry.  Importing the model will automatically register | ||||
|           it. | ||||
|  | ||||
|     ..note :: You can use the choices param to limit the acceptable Document types | ||||
|         * You can use the choices param to limit the acceptable Document types | ||||
|  | ||||
|     .. versionadded:: 0.3 | ||||
|     """ | ||||
| @@ -840,15 +943,20 @@ class BinaryField(BaseField): | ||||
|         self.max_bytes = max_bytes | ||||
|         super(BinaryField, self).__init__(**kwargs) | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|         """Handle bytearrays in python 3.1""" | ||||
|         if PY3 and isinstance(value, bytearray): | ||||
|             value = bin_type(value) | ||||
|         return super(BinaryField, self).__set__(instance, value) | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         return Binary(value) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         return "%s" % value | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, basestring): | ||||
|             self.error('BinaryField only accepts string values') | ||||
|         if not isinstance(value, (bin_type, txt_type, Binary)): | ||||
|             self.error("BinaryField only accepts instances of " | ||||
|                        "(%s, %s, Binary)" % ( | ||||
|                         bin_type.__name__, txt_type.__name__)) | ||||
|  | ||||
|         if self.max_bytes is not None and len(value) > self.max_bytes: | ||||
|             self.error('Binary value is too long') | ||||
| @@ -904,9 +1012,13 @@ class GridFSProxy(object): | ||||
|     def __repr__(self): | ||||
|         return '<%s: %s>' % (self.__class__.__name__, self.grid_id) | ||||
|  | ||||
|     def __cmp__(self, other): | ||||
|         return cmp((self.grid_id, self.collection_name, self.db_alias), | ||||
|                    (other.grid_id, other.collection_name, other.db_alias)) | ||||
|     def __eq__(self, other): | ||||
|         if isinstance(other, GridFSProxy): | ||||
|             return  ((self.grid_id == other.grid_id) and | ||||
|                      (self.collection_name == other.collection_name) and | ||||
|                      (self.db_alias == other.db_alias)) | ||||
|         else: | ||||
|             return False | ||||
|  | ||||
|     @property | ||||
|     def fs(self): | ||||
| @@ -1019,7 +1131,8 @@ class FileField(BaseField): | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|         key = self.name | ||||
|         if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, basestring): | ||||
|         if ((hasattr(value, 'read') and not | ||||
|              isinstance(value, GridFSProxy)) or isinstance(value, str_types)): | ||||
|             # using "FileField() = file/string" notation | ||||
|             grid_file = instance._data.get(self.name) | ||||
|             # If a file already exists, delete it | ||||
| @@ -1075,6 +1188,7 @@ class ImageGridFsProxy(GridFSProxy): | ||||
|  | ||||
|         try: | ||||
|             img = Image.open(file_obj) | ||||
|             img_format = img.format | ||||
|         except: | ||||
|             raise ValidationError('Invalid image') | ||||
|  | ||||
| @@ -1109,20 +1223,20 @@ class ImageGridFsProxy(GridFSProxy): | ||||
|  | ||||
|         if thumbnail: | ||||
|             thumb_id = self._put_thumbnail(thumbnail, | ||||
|                                           img.format) | ||||
|                                           img_format) | ||||
|         else: | ||||
|             thumb_id = None | ||||
|  | ||||
|         w, h = img.size | ||||
|  | ||||
|         io = StringIO() | ||||
|         img.save(io, img.format) | ||||
|         img.save(io, img_format) | ||||
|         io.seek(0) | ||||
|  | ||||
|         return super(ImageGridFsProxy, self).put(io, | ||||
|                                                  width=w, | ||||
|                                                  height=h, | ||||
|                                                  format=img.format, | ||||
|                                                  format=img_format, | ||||
|                                                  thumbnail_id=thumb_id, | ||||
|                                                  **kwargs) | ||||
|  | ||||
| @@ -1208,11 +1322,15 @@ class ImageField(FileField): | ||||
|         params_size = ('width', 'height', 'force') | ||||
|         extra_args = dict(size=size, thumbnail_size=thumbnail_size) | ||||
|         for att_name, att in extra_args.items(): | ||||
|             if att and (isinstance(att, tuple) or isinstance(att, list)): | ||||
|                 setattr(self, att_name, dict( | ||||
|                         map(None, params_size, att))) | ||||
|             else: | ||||
|                 setattr(self, att_name, None) | ||||
|             value = None | ||||
|             if isinstance(att, (tuple, list)): | ||||
|                 if PY3: | ||||
|                     value = dict(itertools.zip_longest(params_size, att, | ||||
|                                                         fillvalue=None)) | ||||
|                 else: | ||||
|                     value = dict(map(None, params_size, att)) | ||||
|  | ||||
|             setattr(self, att_name, value) | ||||
|  | ||||
|         super(ImageField, self).__init__( | ||||
|             collection_name=collection_name, | ||||
| @@ -1254,24 +1372,35 @@ class SequenceField(IntField): | ||||
|  | ||||
|     .. versionadded:: 0.5 | ||||
|     """ | ||||
|     def __init__(self, collection_name=None, db_alias = None, *args, **kwargs): | ||||
|     def __init__(self, collection_name=None, db_alias=None, sequence_name=None, *args, **kwargs): | ||||
|         self.collection_name = collection_name or 'mongoengine.counters' | ||||
|         self.db_alias = db_alias or DEFAULT_CONNECTION_NAME | ||||
|         self.sequence_name = sequence_name | ||||
|         return super(SequenceField, self).__init__(*args, **kwargs) | ||||
|  | ||||
|     def generate_new_value(self): | ||||
|         """ | ||||
|         Generate and Increment the counter | ||||
|         """ | ||||
|         sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(), | ||||
|                                        self.name) | ||||
|         collection = get_db(alias = self.db_alias )[self.collection_name] | ||||
|         sequence_name = self.get_sequence_name() | ||||
|         sequence_id = "%s.%s" % (sequence_name, self.name) | ||||
|         collection = get_db(alias=self.db_alias)[self.collection_name] | ||||
|         counter = collection.find_and_modify(query={"_id": sequence_id}, | ||||
|                                              update={"$inc": {"next": 1}}, | ||||
|                                              new=True, | ||||
|                                              upsert=True) | ||||
|         return counter['next'] | ||||
|  | ||||
|     def get_sequence_name(self): | ||||
|         if self.sequence_name: | ||||
|             return self.sequence_name | ||||
|         owner = self.owner_document | ||||
|         if issubclass(owner, Document): | ||||
|             return owner._get_collection_name() | ||||
|         else: | ||||
|             return ''.join('_%s' % c if c.isupper() else c | ||||
|                             for c in owner._class_name).strip('_').lower() | ||||
|  | ||||
|     def __get__(self, instance, owner): | ||||
|  | ||||
|         if instance is None: | ||||
| @@ -1287,7 +1416,7 @@ class SequenceField(IntField): | ||||
|             instance._data[self.name] = value | ||||
|             instance._mark_as_changed(self.name) | ||||
|  | ||||
|         return value | ||||
|         return int(value) if value else None | ||||
|  | ||||
|     def __set__(self, instance, value): | ||||
|  | ||||
| @@ -1307,17 +1436,44 @@ class UUIDField(BaseField): | ||||
|  | ||||
|     .. versionadded:: 0.6 | ||||
|     """ | ||||
|     _binary = None | ||||
|  | ||||
|     def __init__(self, **kwargs): | ||||
|     def __init__(self, binary=None, **kwargs): | ||||
|         """ | ||||
|         Store UUID data in the database | ||||
|  | ||||
|         :param binary: (optional) boolean store as binary. | ||||
|  | ||||
|         .. versionchanged:: 0.6.19 | ||||
|         """ | ||||
|         if binary is None: | ||||
|             binary = False | ||||
|             msg = ("UUIDFields will soon default to store as binary, please " | ||||
|                   "configure binary=False if you wish to store as a string") | ||||
|             warnings.warn(msg, FutureWarning) | ||||
|         self._binary = binary | ||||
|         super(UUIDField, self).__init__(**kwargs) | ||||
|  | ||||
|     def to_python(self, value): | ||||
|         if not isinstance(value, basestring): | ||||
|             value = unicode(value) | ||||
|         return uuid.UUID(value) | ||||
|         if not self._binary: | ||||
|             original_value = value | ||||
|             try: | ||||
|                 if not isinstance(value, basestring): | ||||
|                     value = unicode(value) | ||||
|                 return uuid.UUID(value) | ||||
|             except: | ||||
|                 return original_value | ||||
|         return value | ||||
|  | ||||
|     def to_mongo(self, value): | ||||
|         return unicode(value) | ||||
|         if not self._binary: | ||||
|             return unicode(value) | ||||
|         return value | ||||
|  | ||||
|     def prepare_query_value(self, op, value): | ||||
|         if value is None: | ||||
|             return None | ||||
|         return self.to_mongo(value) | ||||
|  | ||||
|     def validate(self, value): | ||||
|         if not isinstance(value, uuid.UUID): | ||||
|   | ||||
							
								
								
									
										61
									
								
								mongoengine/python_support.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										61
									
								
								mongoengine/python_support.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,61 @@ | ||||
| """Helper functions and types to aid with Python 2.5 - 3 support.""" | ||||
|  | ||||
| import sys | ||||
|  | ||||
| PY3 = sys.version_info[0] == 3 | ||||
| PY25 = sys.version_info[:2] == (2, 5) | ||||
| UNICODE_KWARGS = int(''.join([str(x) for x in sys.version_info[:3]])) > 264 | ||||
|  | ||||
| if PY3: | ||||
|     import codecs | ||||
|     from io import BytesIO as StringIO | ||||
|     # return s converted to binary.  b('test') should be equivalent to b'test' | ||||
|     def b(s): | ||||
|         return codecs.latin_1_encode(s)[0] | ||||
|  | ||||
|     bin_type = bytes | ||||
|     txt_type   = str | ||||
| else: | ||||
|     try: | ||||
|         from cStringIO import StringIO | ||||
|     except ImportError: | ||||
|         from StringIO import StringIO | ||||
|  | ||||
|     # Conversion to binary only necessary in Python 3 | ||||
|     def b(s): | ||||
|         return s | ||||
|  | ||||
|     bin_type = str | ||||
|     txt_type = unicode | ||||
|  | ||||
| str_types = (bin_type, txt_type) | ||||
|  | ||||
| if PY25: | ||||
|     def product(*args, **kwds): | ||||
|         pools = map(tuple, args) * kwds.get('repeat', 1) | ||||
|         result = [[]] | ||||
|         for pool in pools: | ||||
|             result = [x + [y] for x in result for y in pool] | ||||
|         for prod in result: | ||||
|             yield tuple(prod) | ||||
|     reduce = reduce | ||||
| else: | ||||
|     from itertools import product | ||||
|     from functools import reduce | ||||
|  | ||||
|  | ||||
| # For use with Python 2.5 | ||||
| # converts all keys from unicode to str for d and all nested dictionaries | ||||
| def to_str_keys_recursive(d): | ||||
|     if isinstance(d, list): | ||||
|         for val in d: | ||||
|             if isinstance(val, (dict, list)): | ||||
|                 to_str_keys_recursive(val) | ||||
|     elif isinstance(d, dict): | ||||
|         for key, val in d.items(): | ||||
|             if isinstance(val, (dict, list)): | ||||
|                 to_str_keys_recursive(val) | ||||
|             if isinstance(key, unicode): | ||||
|                 d[str(key)] = d.pop(key) | ||||
|     else: | ||||
|         raise ValueError("non list/dict parameter not allowed") | ||||
| @@ -4,10 +4,14 @@ import copy | ||||
| import itertools | ||||
| import operator | ||||
|  | ||||
| from collections import defaultdict | ||||
| from functools import partial | ||||
|  | ||||
| from mongoengine.python_support import product, reduce, PY3 | ||||
|  | ||||
| import pymongo | ||||
| from bson.code import Code | ||||
| from bson.son import SON | ||||
|  | ||||
| from mongoengine import signals | ||||
|  | ||||
| @@ -42,6 +46,10 @@ class OperationError(Exception): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class NotUniqueError(OperationError): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| RE_TYPE = type(re.compile('')) | ||||
|  | ||||
|  | ||||
| @@ -120,7 +128,7 @@ class QueryTreeTransformerVisitor(QNodeVisitor): | ||||
|             # the necessary parts. Then for each $or part, create a new query | ||||
|             # that ANDs the necessary part with the $or part. | ||||
|             clauses = [] | ||||
|             for or_group in itertools.product(*or_groups): | ||||
|             for or_group in product(*or_groups): | ||||
|                 q_object = reduce(lambda a, b: a & b, and_parts, Q()) | ||||
|                 q_object = reduce(lambda a, b: a & b, or_group, q_object) | ||||
|                 clauses.append(q_object) | ||||
| @@ -211,7 +219,7 @@ class QNode(object): | ||||
|     def _combine(self, other, operation): | ||||
|         """Combine this node with another node into a QCombination object. | ||||
|         """ | ||||
|         if other.empty: | ||||
|         if getattr(other, 'empty', True): | ||||
|             return self | ||||
|  | ||||
|         if self.empty: | ||||
| @@ -329,6 +337,7 @@ class QuerySet(object): | ||||
|     """ | ||||
|  | ||||
|     __already_indexed = set() | ||||
|     __dereference = False | ||||
|  | ||||
|     def __init__(self, document, collection): | ||||
|         self._document = document | ||||
| @@ -345,10 +354,12 @@ class QuerySet(object): | ||||
|         self._slave_okay = False | ||||
|         self._iter = False | ||||
|         self._scalar = [] | ||||
|         self._as_pymongo = False | ||||
|         self._as_pymongo_coerce = False | ||||
|  | ||||
|         # If inheritance is allowed, only return instances and instances of | ||||
|         # subclasses of the class being used | ||||
|         if document._meta.get('allow_inheritance'): | ||||
|         if document._meta.get('allow_inheritance') != False: | ||||
|             self._initial_query = {'_types': self._document._class_name} | ||||
|             self._loaded_fields = QueryFieldList(always_include=['_cls']) | ||||
|         self._cursor_obj = None | ||||
| @@ -356,6 +367,10 @@ class QuerySet(object): | ||||
|         self._skip = None | ||||
|         self._hint = -1  # Using -1 as None is a valid value for hint | ||||
|  | ||||
|     def __deepcopy__(self, memo): | ||||
|         """Essential for chained queries with ReferenceFields involved""" | ||||
|         return self.clone() | ||||
|  | ||||
|     def clone(self): | ||||
|         """Creates a copy of the current :class:`~mongoengine.queryset.QuerySet` | ||||
|  | ||||
| @@ -364,8 +379,8 @@ class QuerySet(object): | ||||
|         c = self.__class__(self._document, self._collection_obj) | ||||
|  | ||||
|         copy_props = ('_initial_query', '_query_obj', '_where_clause', | ||||
|                     '_loaded_fields', '_ordering', '_snapshot', | ||||
|                     '_timeout', '_limit', '_skip', '_slave_okay', '_hint') | ||||
|                       '_loaded_fields', '_ordering', '_snapshot', '_timeout', | ||||
|                       '_limit', '_skip', '_slave_okay', '_hint') | ||||
|  | ||||
|         for prop in copy_props: | ||||
|             val = getattr(self, prop) | ||||
| @@ -382,7 +397,7 @@ class QuerySet(object): | ||||
|         return self._mongo_query | ||||
|  | ||||
|     def ensure_index(self, key_or_list, drop_dups=False, background=False, | ||||
|         **kwargs): | ||||
|                      **kwargs): | ||||
|         """Ensure that the given indexes are in place. | ||||
|  | ||||
|         :param key_or_list: a single index key or a list of index keys (to | ||||
| @@ -390,12 +405,13 @@ class QuerySet(object): | ||||
|             or a **-** to determine the index ordering | ||||
|         """ | ||||
|         index_spec = QuerySet._build_index_spec(self._document, key_or_list) | ||||
|         self._collection.ensure_index( | ||||
|             index_spec['fields'], | ||||
|             drop_dups=drop_dups, | ||||
|             background=background, | ||||
|             sparse=index_spec.get('sparse', False), | ||||
|             unique=index_spec.get('unique', False)) | ||||
|         index_spec = index_spec.copy() | ||||
|         fields = index_spec.pop('fields') | ||||
|         index_spec['drop_dups'] = drop_dups | ||||
|         index_spec['background'] = background | ||||
|         index_spec.update(kwargs) | ||||
|  | ||||
|         self._collection.ensure_index(fields, **index_spec) | ||||
|         return self | ||||
|  | ||||
|     def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query): | ||||
| @@ -438,7 +454,7 @@ class QuerySet(object): | ||||
|         """ | ||||
|         background = self._document._meta.get('index_background', False) | ||||
|         drop_dups = self._document._meta.get('index_drop_dups', False) | ||||
|         index_opts = self._document._meta.get('index_opts', {}) | ||||
|         index_opts = self._document._meta.get('index_opts') or {} | ||||
|         index_types = self._document._meta.get('index_types', True) | ||||
|  | ||||
|         # determine if an index which we are creating includes | ||||
| @@ -446,6 +462,7 @@ class QuerySet(object): | ||||
|         # an extra index on _type, as mongodb will use the existing | ||||
|         # index to service queries against _type | ||||
|         types_indexed = False | ||||
|  | ||||
|         def includes_types(fields): | ||||
|             first_field = None | ||||
|             if len(fields): | ||||
| @@ -462,13 +479,15 @@ class QuerySet(object): | ||||
|                 background=background, drop_dups=drop_dups, **index_opts) | ||||
|  | ||||
|         # Ensure document-defined indexes are created | ||||
|         if self._document._meta['indexes']: | ||||
|             for spec in self._document._meta['indexes']: | ||||
|                 types_indexed = types_indexed or includes_types(spec['fields']) | ||||
|         if self._document._meta['index_specs']: | ||||
|             index_spec = self._document._meta['index_specs'] | ||||
|             for spec in index_spec: | ||||
|                 spec = spec.copy() | ||||
|                 fields = spec.pop('fields') | ||||
|                 types_indexed = types_indexed or includes_types(fields) | ||||
|                 opts = index_opts.copy() | ||||
|                 opts['unique'] = spec.get('unique', False) | ||||
|                 opts['sparse'] = spec.get('sparse', False) | ||||
|                 self._collection.ensure_index(spec['fields'], | ||||
|                 opts.update(spec) | ||||
|                 self._collection.ensure_index(fields, | ||||
|                     background=background, **opts) | ||||
|  | ||||
|         # If _types is being used (for polymorphism), it needs an index, | ||||
| @@ -489,13 +508,24 @@ class QuerySet(object): | ||||
|         """ | ||||
|         if isinstance(spec, basestring): | ||||
|             spec = {'fields': [spec]} | ||||
|         if isinstance(spec, (list, tuple)): | ||||
|             spec = {'fields': spec} | ||||
|         elif isinstance(spec, (list, tuple)): | ||||
|             spec = {'fields': list(spec)} | ||||
|         elif isinstance(spec, dict): | ||||
|             spec = dict(spec) | ||||
|  | ||||
|         index_list = [] | ||||
|         direction = None | ||||
|         use_types = doc_cls._meta.get('allow_inheritance', True) | ||||
|  | ||||
|         allow_inheritance = doc_cls._meta.get('allow_inheritance') != False | ||||
|  | ||||
|         # If sparse - dont include types | ||||
|         use_types = allow_inheritance and not spec.get('sparse', False) | ||||
|  | ||||
|         for key in spec['fields']: | ||||
|             # If inherited spec continue | ||||
|             if isinstance(key, (list, tuple)): | ||||
|                 continue | ||||
|  | ||||
|             # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from * | ||||
|             direction = pymongo.ASCENDING | ||||
|             if key.startswith("-"): | ||||
| @@ -510,24 +540,23 @@ class QuerySet(object): | ||||
|             parts = key.split('.') | ||||
|             if parts in (['pk'], ['id'], ['_id']): | ||||
|                 key = '_id' | ||||
|                 fields = [] | ||||
|             else: | ||||
|                 fields = QuerySet._lookup_field(doc_cls, parts) | ||||
|                 parts = [field if field == '_id' else field.db_field for field in fields] | ||||
|                 parts = [field if field == '_id' else field.db_field | ||||
|                          for field in fields] | ||||
|                 key = '.'.join(parts) | ||||
|             index_list.append((key, direction)) | ||||
|  | ||||
|             # If sparse - dont include types | ||||
|             if spec.get('sparse', False): | ||||
|                 use_types = False | ||||
|  | ||||
|             # Check if a list field is being used, don't use _types if it is | ||||
|             if use_types and not all(f._index_with_types for f in fields): | ||||
|                 use_types = False | ||||
|  | ||||
|         # If _types is being used, prepend it to every specified index | ||||
|         index_types = doc_cls._meta.get('index_types', True) | ||||
|         allow_inheritance = doc_cls._meta.get('allow_inheritance') | ||||
|         if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D: | ||||
|  | ||||
|         if (spec.get('types', index_types) and use_types | ||||
|             and direction is not pymongo.GEO2D): | ||||
|             index_list.insert(0, ('_types', 1)) | ||||
|  | ||||
|         spec['fields'] = index_list | ||||
| @@ -586,11 +615,13 @@ class QuerySet(object): | ||||
|             if self._where_clause: | ||||
|                 self._cursor_obj.where(self._where_clause) | ||||
|  | ||||
|             # apply default ordering | ||||
|             if self._ordering: | ||||
|                 # Apply query ordering | ||||
|                 self._cursor_obj.sort(self._ordering) | ||||
|             elif self._document._meta['ordering']: | ||||
|                 # Otherwise, apply the ordering from the document model | ||||
|                 self.order_by(*self._document._meta['ordering']) | ||||
|                 self._cursor_obj.sort(self._ordering) | ||||
|  | ||||
|             if self._limit is not None: | ||||
|                 self._cursor_obj.limit(self._limit - (self._skip or 0)) | ||||
| @@ -600,7 +631,6 @@ class QuerySet(object): | ||||
|  | ||||
|             if self._hint != -1: | ||||
|                 self._cursor_obj.hint(self._hint) | ||||
|  | ||||
|         return self._cursor_obj | ||||
|  | ||||
|     @classmethod | ||||
| @@ -641,7 +671,7 @@ class QuerySet(object): | ||||
|                 from mongoengine.fields import ReferenceField, GenericReferenceField | ||||
|                 if isinstance(field, (ReferenceField, GenericReferenceField)): | ||||
|                     raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts)) | ||||
|                 if getattr(field, 'field', None): | ||||
|                 if hasattr(getattr(field, 'field', None), 'lookup_member'): | ||||
|                     new_field = field.field.lookup_member(field_name) | ||||
|                 else: | ||||
|                    # Look up subfield on the previous field | ||||
| @@ -678,7 +708,8 @@ class QuerySet(object): | ||||
|         custom_operators = ['match'] | ||||
|  | ||||
|         mongo_query = {} | ||||
|         for key, value in query.items(): | ||||
|         merge_query = defaultdict(list) | ||||
|         for key, value in sorted(query.items()): | ||||
|             if key == "__raw__": | ||||
|                 mongo_query.update(value) | ||||
|                 continue | ||||
| @@ -765,9 +796,23 @@ class QuerySet(object): | ||||
|             key = '.'.join(parts) | ||||
|             if op is None or key not in mongo_query: | ||||
|                 mongo_query[key] = value | ||||
|             elif key in mongo_query and isinstance(mongo_query[key], dict): | ||||
|                 mongo_query[key].update(value) | ||||
|             elif key in mongo_query: | ||||
|                 if key in mongo_query and isinstance(mongo_query[key], dict): | ||||
|                     mongo_query[key].update(value) | ||||
|                 else: | ||||
|                     # Store for manually merging later | ||||
|                     merge_query[key].append(value) | ||||
|  | ||||
|         # The queryset has been filter in such a way we must manually merge | ||||
|         for k, v in merge_query.items(): | ||||
|             merge_query[k].append(mongo_query[k]) | ||||
|             del mongo_query[k] | ||||
|             if isinstance(v, list): | ||||
|                 value = [{k:val} for val in v] | ||||
|                 if '$and' in mongo_query.keys(): | ||||
|                     mongo_query['$and'].append(value) | ||||
|                 else: | ||||
|                     mongo_query['$and'] = value | ||||
|         return mongo_query | ||||
|  | ||||
|     def get(self, *q_objs, **query): | ||||
| @@ -806,9 +851,9 @@ class QuerySet(object): | ||||
|         keyword argument called :attr:`defaults`. | ||||
|  | ||||
|         .. note:: This requires two separate operations and therefore a | ||||
|         race condition exists.  Because there are no transactions in mongoDB | ||||
|         other approaches should be investigated, to ensure you don't | ||||
|         accidently duplicate data when using this method. | ||||
|             race condition exists.  Because there are no transactions in mongoDB | ||||
|             other approaches should be investigated, to ensure you don't | ||||
|             accidently duplicate data when using this method. | ||||
|  | ||||
|         :param write_options: optional extra keyword arguments used if we | ||||
|             have to create a new document. | ||||
| @@ -816,8 +861,8 @@ class QuerySet(object): | ||||
|  | ||||
|         :param auto_save: if the object is to be saved automatically if not found. | ||||
|  | ||||
|         .. versionchanged:: 0.6 - added `auto_save` | ||||
|         .. versionadded:: 0.3 | ||||
|         .. versionupdated:: 0.6 - added `auto_save` | ||||
|         """ | ||||
|         defaults = query.get('defaults', {}) | ||||
|         if 'defaults' in query: | ||||
| @@ -890,7 +935,7 @@ class QuerySet(object): | ||||
|             if not isinstance(doc, self._document): | ||||
|                 msg = "Some documents inserted aren't instances of %s" % str(self._document) | ||||
|                 raise OperationError(msg) | ||||
|             if doc.pk: | ||||
|             if doc.pk and not doc._created: | ||||
|                 msg = "Some documents have ObjectIds use doc.update() instead" | ||||
|                 raise OperationError(msg) | ||||
|             raw.append(doc.to_mongo()) | ||||
| @@ -900,8 +945,11 @@ class QuerySet(object): | ||||
|             ids = self._collection.insert(raw, **write_options) | ||||
|         except pymongo.errors.OperationFailure, err: | ||||
|             message = 'Could not save document (%s)' | ||||
|             if u'duplicate key' in unicode(err): | ||||
|             if re.match('^E1100[01] duplicate key', unicode(err)): | ||||
|                 # E11000 - duplicate key error index | ||||
|                 # E11001 - duplicate key on update | ||||
|                 message = u'Tried to save duplicate unique keys (%s)' | ||||
|                 raise NotUniqueError(message % unicode(err)) | ||||
|             raise OperationError(message % unicode(err)) | ||||
|  | ||||
|         if not load_bulk: | ||||
| @@ -946,6 +994,9 @@ class QuerySet(object): | ||||
|             for doc in docs: | ||||
|                 doc_map[doc['_id']] = self._get_scalar( | ||||
|                         self._document._from_son(doc)) | ||||
|         elif self._as_pymongo: | ||||
|             for doc in docs: | ||||
|                 doc_map[doc['_id']] = self._get_as_pymongo(doc) | ||||
|         else: | ||||
|             for doc in docs: | ||||
|                 doc_map[doc['_id']] = self._document._from_son(doc) | ||||
| @@ -962,6 +1013,9 @@ class QuerySet(object): | ||||
|             if self._scalar: | ||||
|                 return self._get_scalar(self._document._from_son( | ||||
|                         self._cursor.next())) | ||||
|             if self._as_pymongo: | ||||
|                 return self._get_as_pymongo(self._cursor.next()) | ||||
|  | ||||
|             return self._document._from_son(self._cursor.next()) | ||||
|         except StopIteration, e: | ||||
|             self.rewind() | ||||
| @@ -1001,6 +1055,8 @@ class QuerySet(object): | ||||
|                          :class:`~bson.code.Code` or string | ||||
|         :param output: output collection name, if set to 'inline' will try to | ||||
|                        use :class:`~pymongo.collection.Collection.inline_map_reduce` | ||||
|                        This can also be a dictionary containing output options | ||||
|                        see: http://docs.mongodb.org/manual/reference/commands/#mapReduce | ||||
|         :param finalize_f: finalize function, an optional function that | ||||
|                            performs any post-reduction processing. | ||||
|         :param scope: values to insert into map/reduce global scope. Optional. | ||||
| @@ -1142,6 +1198,8 @@ class QuerySet(object): | ||||
|             if self._scalar: | ||||
|                 return self._get_scalar(self._document._from_son( | ||||
|                         self._cursor[key])) | ||||
|             if self._as_pymongo: | ||||
|                 return self._get_as_pymongo(self._cursor.next()) | ||||
|             return self._document._from_son(self._cursor[key]) | ||||
|         raise AttributeError | ||||
|  | ||||
| @@ -1152,9 +1210,13 @@ class QuerySet(object): | ||||
|  | ||||
|         .. versionadded:: 0.4 | ||||
|         .. versionchanged:: 0.5 - Fixed handling references | ||||
|         .. versionchanged:: 0.6 - Improved db_field refrence handling | ||||
|         """ | ||||
|         from dereference import DeReference | ||||
|         return DeReference()(self._cursor.distinct(field), 1) | ||||
|         try: | ||||
|             field = self._fields_to_dbfields([field]).pop() | ||||
|         finally: | ||||
|             return self._dereference(self._cursor.distinct(field), 1, | ||||
|                                      name=field, instance=self._document) | ||||
|  | ||||
|     def only(self, *fields): | ||||
|         """Load only a subset of this document's fields. :: | ||||
| @@ -1259,7 +1321,8 @@ class QuerySet(object): | ||||
|             key_list.append((key, direction)) | ||||
|  | ||||
|         self._ordering = key_list | ||||
|         self._cursor.sort(key_list) | ||||
|         if self._cursor_obj: | ||||
|             self._cursor_obj.sort(key_list) | ||||
|         return self | ||||
|  | ||||
|     def explain(self, format=False): | ||||
| @@ -1309,9 +1372,16 @@ class QuerySet(object): | ||||
|         """ | ||||
|         doc = self._document | ||||
|  | ||||
|         # Handle deletes where skips or limits have been applied | ||||
|         if self._skip or self._limit: | ||||
|             for doc in self: | ||||
|                 doc.delete() | ||||
|             return | ||||
|  | ||||
|         delete_rules = doc._meta.get('delete_rules') or {} | ||||
|         # Check for DENY rules before actually deleting/nullifying any other | ||||
|         # references | ||||
|         for rule_entry in doc._meta['delete_rules']: | ||||
|         for rule_entry in delete_rules: | ||||
|             document_cls, field_name = rule_entry | ||||
|             rule = doc._meta['delete_rules'][rule_entry] | ||||
|             if rule == DENY and document_cls.objects(**{field_name + '__in': self}).count() > 0: | ||||
| @@ -1319,12 +1389,14 @@ class QuerySet(object): | ||||
|                         (document_cls.__name__, field_name) | ||||
|                 raise OperationError(msg) | ||||
|  | ||||
|         for rule_entry in doc._meta['delete_rules']: | ||||
|         for rule_entry in delete_rules: | ||||
|             document_cls, field_name = rule_entry | ||||
|             rule = doc._meta['delete_rules'][rule_entry] | ||||
|             if rule == CASCADE: | ||||
|                 ref_q = document_cls.objects(**{field_name + '__in': self}) | ||||
|                 if doc != document_cls or (doc == document_cls and ref_q.count() > 0): | ||||
|                 ref_q_count = ref_q.count() | ||||
|                 if (doc != document_cls and ref_q_count > 0 | ||||
|                     or (doc == document_cls and ref_q_count > 0)): | ||||
|                     ref_q.delete(safe=safe) | ||||
|             elif rule == NULLIFY: | ||||
|                 document_cls.objects(**{field_name + '__in': self}).update( | ||||
| @@ -1343,6 +1415,8 @@ class QuerySet(object): | ||||
|         """ | ||||
|         operators = ['set', 'unset', 'inc', 'dec', 'pop', 'push', 'push_all', | ||||
|                      'pull', 'pull_all', 'add_to_set'] | ||||
|         match_operators = ['ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', | ||||
|                            'all', 'size', 'exists', 'not'] | ||||
|  | ||||
|         mongo_update = {} | ||||
|         for key, value in update.items(): | ||||
| @@ -1366,6 +1440,10 @@ class QuerySet(object): | ||||
|                 elif op == 'add_to_set': | ||||
|                     op = op.replace('_to_set', 'ToSet') | ||||
|  | ||||
|             match = None | ||||
|             if parts[-1] in match_operators: | ||||
|                 match = parts.pop() | ||||
|  | ||||
|             if _doc_cls: | ||||
|                 # Switch field names to proper names [set in Field(name='foo')] | ||||
|                 fields = QuerySet._lookup_field(_doc_cls, parts) | ||||
| @@ -1399,16 +1477,22 @@ class QuerySet(object): | ||||
|                     elif field.required or value is not None: | ||||
|                         value = field.prepare_query_value(op, value) | ||||
|  | ||||
|             if match: | ||||
|                 match = '$' + match | ||||
|                 value = {match: value} | ||||
|  | ||||
|             key = '.'.join(parts) | ||||
|  | ||||
|             if not op: | ||||
|                 raise InvalidQueryError("Updates must supply an operation eg: set__FIELD=value") | ||||
|                 raise InvalidQueryError("Updates must supply an operation " | ||||
|                                         "eg: set__FIELD=value") | ||||
|  | ||||
|             if 'pull' in op and '.' in key: | ||||
|                 # Dot operators don't work on pull operations | ||||
|                 # it uses nested dict syntax | ||||
|                 if op == 'pullAll': | ||||
|                     raise InvalidQueryError("pullAll operations only support a single field depth") | ||||
|                     raise InvalidQueryError("pullAll operations only support " | ||||
|                                             "a single field depth") | ||||
|  | ||||
|                 parts.reverse() | ||||
|                 for key in parts: | ||||
| @@ -1516,6 +1600,48 @@ class QuerySet(object): | ||||
|  | ||||
|         return tuple(data) | ||||
|  | ||||
|     def _get_as_pymongo(self, row): | ||||
|         # Extract which fields paths we should follow if .fields(...) was | ||||
|         # used. If not, handle all fields. | ||||
|         if not getattr(self, '__as_pymongo_fields', None): | ||||
|             self.__as_pymongo_fields = [] | ||||
|             for field in self._loaded_fields.fields - set(['_cls', '_id', '_types']): | ||||
|                 self.__as_pymongo_fields.append(field) | ||||
|                 while '.' in field: | ||||
|                     field, _ = field.rsplit('.', 1) | ||||
|                     self.__as_pymongo_fields.append(field) | ||||
|  | ||||
|         all_fields = not self.__as_pymongo_fields | ||||
|  | ||||
|         def clean(data, path=None): | ||||
|             path = path or '' | ||||
|  | ||||
|             if isinstance(data, dict): | ||||
|                 new_data = {} | ||||
|                 for key, value in data.iteritems(): | ||||
|                     new_path = '%s.%s' % (path, key) if path else key | ||||
|                     if all_fields or new_path in self.__as_pymongo_fields: | ||||
|                         new_data[key] = clean(value, path=new_path) | ||||
|                 data = new_data | ||||
|             elif isinstance(data, list): | ||||
|                 data = [clean(d, path=path) for d in data] | ||||
|             else: | ||||
|                 if self._as_pymongo_coerce: | ||||
|                     # If we need to coerce types, we need to determine the | ||||
|                     # type of this field and use the corresponding .to_python(...) | ||||
|                     from mongoengine.fields import EmbeddedDocumentField | ||||
|                     obj = self._document | ||||
|                     for chunk in path.split('.'): | ||||
|                         obj = getattr(obj, chunk, None) | ||||
|                         if obj is None: | ||||
|                             break | ||||
|                         elif isinstance(obj, EmbeddedDocumentField): | ||||
|                             obj = obj.document_type | ||||
|                     if obj and data is not None: | ||||
|                         data = obj.to_python(data) | ||||
|             return data | ||||
|         return clean(row) | ||||
|  | ||||
|     def scalar(self, *fields): | ||||
|         """Instead of returning Document instances, return either a specific | ||||
|         value or a tuple of values in order. | ||||
| @@ -1538,6 +1664,16 @@ class QuerySet(object): | ||||
|         """An alias for scalar""" | ||||
|         return self.scalar(*fields) | ||||
|  | ||||
|     def as_pymongo(self, coerce_types=False): | ||||
|         """Instead of returning Document instances, return raw values from | ||||
|         pymongo. | ||||
|  | ||||
|         :param coerce_type: Field types (if applicable) would be use to coerce types. | ||||
|         """ | ||||
|         self._as_pymongo = True | ||||
|         self._as_pymongo_coerce = coerce_types | ||||
|         return self | ||||
|  | ||||
|     def _sub_js_fields(self, code): | ||||
|         """When fields are specified with [~fieldname] syntax, where | ||||
|         *fieldname* is the Python name of a field, *fieldname* will be | ||||
| @@ -1854,13 +1990,30 @@ class QuerySet(object): | ||||
|  | ||||
|         .. versionadded:: 0.5 | ||||
|         """ | ||||
|         from dereference import DeReference | ||||
|         # Make select related work the same for querysets | ||||
|         max_depth += 1 | ||||
|         return DeReference()(self, max_depth=max_depth) | ||||
|         return self._dereference(self, max_depth=max_depth) | ||||
|  | ||||
|     @property | ||||
|     def _dereference(self): | ||||
|         if not self.__dereference: | ||||
|             from dereference import DeReference | ||||
|             self.__dereference = DeReference()  # Cached | ||||
|         return self.__dereference | ||||
|  | ||||
|  | ||||
| class QuerySetManager(object): | ||||
|     """ | ||||
|     The default QuerySet Manager. | ||||
|  | ||||
|     Custom QuerySet Manager functions can extend this class and users can | ||||
|     add extra queryset functionality.  Any custom manager methods must accept a | ||||
|     :class:`~mongoengine.Document` class as its first argument, and a | ||||
|     :class:`~mongoengine.queryset.QuerySet` as its second argument. | ||||
|  | ||||
|     The method function should return a :class:`~mongoengine.queryset.QuerySet` | ||||
|     , probably the same one that was passed in, but modified in some way. | ||||
|     """ | ||||
|  | ||||
|     get_queryset = None | ||||
|  | ||||
| @@ -1878,13 +2031,13 @@ class QuerySetManager(object): | ||||
|             return self | ||||
|  | ||||
|         # owner is the document that contains the QuerySetManager | ||||
|         queryset_class = owner._meta['queryset_class'] or QuerySet | ||||
|         queryset_class = owner._meta.get('queryset_class') or QuerySet | ||||
|         queryset = queryset_class(owner, owner._get_collection()) | ||||
|         if self.get_queryset: | ||||
|             var_names = self.get_queryset.func_code.co_varnames | ||||
|             if var_names == ('queryset',): | ||||
|             arg_count = self.get_queryset.func_code.co_argcount | ||||
|             if arg_count == 1: | ||||
|                 queryset = self.get_queryset(queryset) | ||||
|             elif var_names == ('doc_cls', 'queryset',): | ||||
|             elif arg_count == 2: | ||||
|                 queryset = self.get_queryset(owner, queryset) | ||||
|             else: | ||||
|                 queryset = partial(self.get_queryset, owner, queryset) | ||||
|   | ||||
| @@ -5,7 +5,7 @@ | ||||
| %define srcname mongoengine | ||||
|  | ||||
| Name:           python-%{srcname} | ||||
| Version:        0.6.16 | ||||
| Version:        0.7.10 | ||||
| Release:        1%{?dist} | ||||
| Summary:        A Python Document-Object Mapper for working with MongoDB | ||||
|  | ||||
| @@ -51,4 +51,4 @@ rm -rf $RPM_BUILD_ROOT | ||||
| # %{python_sitearch}/* | ||||
|  | ||||
| %changelog | ||||
| * See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html | ||||
| * See: http://docs.mongoengine.org/en/latest/changelog.html | ||||
| @@ -1,13 +1,11 @@ | ||||
| [aliases] | ||||
| test = nosetests | ||||
|  | ||||
| [nosetests] | ||||
| verbosity = 2 | ||||
| verbosity = 3 | ||||
| detailed-errors = 1 | ||||
| #with-coverage = 1 | ||||
| #cover-erase = 1 | ||||
| #cover-html = 1 | ||||
| #cover-html-dir = ../htmlcov | ||||
| #cover-package = mongoengine | ||||
| py3where = build | ||||
| where = tests | ||||
| #tests = test_bugfix.py | ||||
| #tests =  test_bugfix.py | ||||
							
								
								
									
										49
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										49
									
								
								setup.py
									
									
									
									
									
								
							| @@ -1,27 +1,35 @@ | ||||
| from setuptools import setup, find_packages | ||||
| import os | ||||
| import sys | ||||
| from setuptools import setup, find_packages | ||||
|  | ||||
| DESCRIPTION = "A Python Document-Object Mapper for working with MongoDB" | ||||
| # Hack to silence atexit traceback in newer python versions | ||||
| try: | ||||
|     import multiprocessing | ||||
| except ImportError: | ||||
|     pass | ||||
|  | ||||
| DESCRIPTION = """MongoEngine is a Python Object-Document | ||||
| Mapper for working with MongoDB.""" | ||||
| LONG_DESCRIPTION = None | ||||
| try: | ||||
|     LONG_DESCRIPTION = open('README.rst').read() | ||||
| except: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| def get_version(version_tuple): | ||||
|     version = '%s.%s' % (version_tuple[0], version_tuple[1]) | ||||
|     if version_tuple[2]: | ||||
|         version = '%s.%s' % (version, version_tuple[2]) | ||||
|     return version | ||||
|     if not isinstance(version_tuple[-1], int): | ||||
|         return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1] | ||||
|     return '.'.join(map(str, version_tuple)) | ||||
|  | ||||
| # Dirty hack to get version number from monogengine/__init__.py - we can't | ||||
| # import it as it depends on PyMongo and PyMongo isn't installed until this | ||||
| # file is read | ||||
| init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') | ||||
| version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0] | ||||
| version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0] | ||||
|  | ||||
| VERSION = get_version(eval(version_line.split('=')[-1])) | ||||
| print VERSION | ||||
| print(VERSION) | ||||
|  | ||||
| CLASSIFIERS = [ | ||||
|     'Development Status :: 4 - Beta', | ||||
| @@ -29,18 +37,38 @@ CLASSIFIERS = [ | ||||
|     'License :: OSI Approved :: MIT License', | ||||
|     'Operating System :: OS Independent', | ||||
|     'Programming Language :: Python', | ||||
|     "Programming Language :: Python :: 2", | ||||
|     "Programming Language :: Python :: 2.5", | ||||
|     "Programming Language :: Python :: 2.6", | ||||
|     "Programming Language :: Python :: 2.7", | ||||
|     "Programming Language :: Python :: 3", | ||||
|     "Programming Language :: Python :: 3.1", | ||||
|     "Programming Language :: Python :: 3.2", | ||||
|     "Programming Language :: Python :: Implementation :: CPython", | ||||
|     'Topic :: Database', | ||||
|     'Topic :: Software Development :: Libraries :: Python Modules', | ||||
| ] | ||||
|  | ||||
| extra_opts = {} | ||||
| if sys.version_info[0] == 3: | ||||
|     extra_opts['use_2to3'] = True | ||||
|     extra_opts['tests_require'] = ['nose', 'coverage', 'blinker'] | ||||
|     extra_opts['packages'] = find_packages(exclude=('tests',)) | ||||
|     if "test" in sys.argv or "nosetests" in sys.argv: | ||||
|         extra_opts['packages'].append("tests") | ||||
|         extra_opts['package_data'] = {"tests": ["mongoengine.png"]} | ||||
| else: | ||||
|     extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django==1.4.2', 'PIL'] | ||||
|     extra_opts['packages'] = find_packages(exclude=('tests',)) | ||||
|  | ||||
| setup(name='mongoengine', | ||||
|       version=VERSION, | ||||
|       packages=find_packages(exclude=('tests',)), | ||||
|       author='Harry Marr', | ||||
|       author_email='harry.marr@{nospam}gmail.com', | ||||
|       maintainer="Ross Lawley", | ||||
|       maintainer_email="ross.lawley@{nospam}gmail.com", | ||||
|       url='http://mongoengine.org/', | ||||
|       download_url='https://github.com/MongoEngine/mongoengine/tarball/master', | ||||
|       license='MIT', | ||||
|       include_package_data=True, | ||||
|       description=DESCRIPTION, | ||||
| @@ -48,5 +76,6 @@ setup(name='mongoengine', | ||||
|       platforms=['any'], | ||||
|       classifiers=CLASSIFIERS, | ||||
|       install_requires=['pymongo'], | ||||
|       tests_require=['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL'] | ||||
|       test_suite='nose.collector', | ||||
|       **extra_opts | ||||
| ) | ||||
|   | ||||
							
								
								
									
										98
									
								
								tests/test_all_warnings.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										98
									
								
								tests/test_all_warnings.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,98 @@ | ||||
| import unittest | ||||
| import warnings | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.tests import query_counter | ||||
|  | ||||
|  | ||||
| class TestWarnings(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         conn = connect(db='mongoenginetest') | ||||
|         self.warning_list = [] | ||||
|         self.showwarning_default = warnings.showwarning | ||||
|         warnings.showwarning = self.append_to_warning_list | ||||
|  | ||||
|     def append_to_warning_list(self, message, category, *args): | ||||
|         self.warning_list.append({"message": message, | ||||
|                                   "category": category}) | ||||
|  | ||||
|     def tearDown(self): | ||||
|         # restore default handling of warnings | ||||
|         warnings.showwarning = self.showwarning_default | ||||
|  | ||||
|     def test_allow_inheritance_future_warning(self): | ||||
|         """Add FutureWarning for future allow_inhertiance default change. | ||||
|         """ | ||||
|  | ||||
|         class SimpleBase(Document): | ||||
|             a = IntField() | ||||
|  | ||||
|         class InheritedClass(SimpleBase): | ||||
|             b = IntField() | ||||
|  | ||||
|         InheritedClass() | ||||
|         self.assertEqual(len(self.warning_list), 1) | ||||
|         warning = self.warning_list[0] | ||||
|         self.assertEqual(FutureWarning, warning["category"]) | ||||
|         self.assertTrue("InheritedClass" in str(warning["message"])) | ||||
|  | ||||
|     def test_dbref_reference_field_future_warning(self): | ||||
|  | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             parent = ReferenceField('self') | ||||
|  | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         p1 = Person() | ||||
|         p1.parent = None | ||||
|         p1.save() | ||||
|  | ||||
|         p2 = Person(name="Wilson Jr") | ||||
|         p2.parent = p1 | ||||
|         p2.save(cascade=False) | ||||
|  | ||||
|         self.assertTrue(len(self.warning_list) > 0) | ||||
|         warning = self.warning_list[0] | ||||
|         self.assertEqual(FutureWarning, warning["category"]) | ||||
|         self.assertTrue("ReferenceFields will default to using ObjectId" | ||||
|                         in str(warning["message"])) | ||||
|  | ||||
|     def test_document_save_cascade_future_warning(self): | ||||
|  | ||||
|         class Person(Document): | ||||
|             name = StringField() | ||||
|             parent = ReferenceField('self') | ||||
|  | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         p1 = Person(name="Wilson Snr") | ||||
|         p1.parent = None | ||||
|         p1.save() | ||||
|  | ||||
|         p2 = Person(name="Wilson Jr") | ||||
|         p2.parent = p1 | ||||
|         p2.parent.name = "Poppa Wilson" | ||||
|         p2.save() | ||||
|  | ||||
|         self.assertTrue(len(self.warning_list) > 0) | ||||
|         if len(self.warning_list) > 1: | ||||
|             print self.warning_list | ||||
|         warning = self.warning_list[0] | ||||
|         self.assertEqual(FutureWarning, warning["category"]) | ||||
|         self.assertTrue("Cascading saves will default to off in 0.8" | ||||
|                         in str(warning["message"])) | ||||
|  | ||||
|     def test_document_collection_syntax_warning(self): | ||||
|  | ||||
|         class NonAbstractBase(Document): | ||||
|             pass | ||||
|  | ||||
|         class InheritedDocumentFailTest(NonAbstractBase): | ||||
|             meta = {'collection': 'fail'} | ||||
|  | ||||
|         warning = self.warning_list[0] | ||||
|         self.assertEqual(SyntaxWarning, warning["category"]) | ||||
|         self.assertEqual('non_abstract_base', | ||||
|                          InheritedDocumentFailTest._get_collection_name()) | ||||
| @@ -1,5 +1,9 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| from __future__ import with_statement | ||||
| import unittest | ||||
|  | ||||
| from bson import DBRef, ObjectId | ||||
|  | ||||
| from mongoengine import * | ||||
| from mongoengine.connection import get_db | ||||
| from mongoengine.tests import query_counter | ||||
| @@ -39,6 +43,12 @@ class FieldTest(unittest.TestCase): | ||||
|             group_obj = Group.objects.first() | ||||
|             self.assertEqual(q, 1) | ||||
|  | ||||
|             len(group_obj._data['members']) | ||||
|             self.assertEqual(q, 1) | ||||
|  | ||||
|             len(group_obj.members) | ||||
|             self.assertEqual(q, 2) | ||||
|  | ||||
|             [m for m in group_obj.members] | ||||
|             self.assertEqual(q, 2) | ||||
|  | ||||
| @@ -63,6 +73,132 @@ class FieldTest(unittest.TestCase): | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|     def test_list_item_dereference_dref_false(self): | ||||
|         """Ensure that DBRef items in ListFields are dereferenced. | ||||
|         """ | ||||
|         class User(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         class Group(Document): | ||||
|             members = ListField(ReferenceField(User, dbref=False)) | ||||
|  | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 51): | ||||
|             user = User(name='user %s' % i) | ||||
|             user.save() | ||||
|  | ||||
|         group = Group(members=User.objects) | ||||
|         group.save() | ||||
|         group.reload()  # Confirm reload works | ||||
|  | ||||
|         with query_counter() as q: | ||||
|             self.assertEqual(q, 0) | ||||
|  | ||||
|             group_obj = Group.objects.first() | ||||
|             self.assertEqual(q, 1) | ||||
|  | ||||
|             [m for m in group_obj.members] | ||||
|             self.assertEqual(q, 2) | ||||
|  | ||||
|         # Document select_related | ||||
|         with query_counter() as q: | ||||
|             self.assertEqual(q, 0) | ||||
|  | ||||
|             group_obj = Group.objects.first().select_related() | ||||
|  | ||||
|             self.assertEqual(q, 2) | ||||
|             [m for m in group_obj.members] | ||||
|             self.assertEqual(q, 2) | ||||
|  | ||||
|         # Queryset select_related | ||||
|         with query_counter() as q: | ||||
|             self.assertEqual(q, 0) | ||||
|             group_objs = Group.objects.select_related() | ||||
|             self.assertEqual(q, 2) | ||||
|             for group_obj in group_objs: | ||||
|                 [m for m in group_obj.members] | ||||
|                 self.assertEqual(q, 2) | ||||
|  | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|     def test_handle_old_style_references(self): | ||||
|         """Ensure that DBRef items in ListFields are dereferenced. | ||||
|         """ | ||||
|         class User(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         class Group(Document): | ||||
|             members = ListField(ReferenceField(User, dbref=True)) | ||||
|  | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         for i in xrange(1, 26): | ||||
|             user = User(name='user %s' % i) | ||||
|             user.save() | ||||
|  | ||||
|         group = Group(members=User.objects) | ||||
|         group.save() | ||||
|  | ||||
|         group = Group._get_collection().find_one() | ||||
|  | ||||
|         # Update the model to change the reference | ||||
|         class Group(Document): | ||||
|             members = ListField(ReferenceField(User, dbref=False)) | ||||
|  | ||||
|         group = Group.objects.first() | ||||
|         group.members.append(User(name="String!").save()) | ||||
|         group.save() | ||||
|  | ||||
|         group = Group.objects.first() | ||||
|         self.assertEqual(group.members[0].name, 'user 1') | ||||
|         self.assertEqual(group.members[-1].name, 'String!') | ||||
|  | ||||
|     def test_migrate_references(self): | ||||
|         """Example of migrating ReferenceField storage | ||||
|         """ | ||||
|  | ||||
|         # Create some sample data | ||||
|         class User(Document): | ||||
|             name = StringField() | ||||
|  | ||||
|         class Group(Document): | ||||
|             author = ReferenceField(User, dbref=True) | ||||
|             members = ListField(ReferenceField(User, dbref=True)) | ||||
|  | ||||
|         User.drop_collection() | ||||
|         Group.drop_collection() | ||||
|  | ||||
|         user = User(name="Ross").save() | ||||
|         group = Group(author=user, members=[user]).save() | ||||
|  | ||||
|         raw_data = Group._get_collection().find_one() | ||||
|         self.assertTrue(isinstance(raw_data['author'], DBRef)) | ||||
|         self.assertTrue(isinstance(raw_data['members'][0], DBRef)) | ||||
|  | ||||
|         # Migrate the model definition | ||||
|         class Group(Document): | ||||
|             author = ReferenceField(User, dbref=False) | ||||
|             members = ListField(ReferenceField(User, dbref=False)) | ||||
|  | ||||
|         # Migrate the data | ||||
|         for g in Group.objects(): | ||||
|             # Explicitly mark as changed so resets | ||||
|             g._mark_as_changed('author') | ||||
|             g._mark_as_changed('members') | ||||
|             g.save() | ||||
|  | ||||
|         group = Group.objects.first() | ||||
|         self.assertEqual(group.author, user) | ||||
|         self.assertEqual(group.members, [user]) | ||||
|  | ||||
|         raw_data = Group._get_collection().find_one() | ||||
|         self.assertTrue(isinstance(raw_data['author'], ObjectId)) | ||||
|         self.assertTrue(isinstance(raw_data['members'][0], ObjectId)) | ||||
|  | ||||
|     def test_recursive_reference(self): | ||||
|         """Ensure that ReferenceFields can reference their own documents. | ||||
|         """ | ||||
| @@ -109,10 +245,10 @@ class FieldTest(unittest.TestCase): | ||||
|             peter = Employee.objects.with_id(peter.id).select_related() | ||||
|             self.assertEqual(q, 2) | ||||
|  | ||||
|             self.assertEquals(peter.boss, bill) | ||||
|             self.assertEqual(peter.boss, bill) | ||||
|             self.assertEqual(q, 2) | ||||
|  | ||||
|             self.assertEquals(peter.friends, friends) | ||||
|             self.assertEqual(peter.friends, friends) | ||||
|             self.assertEqual(q, 2) | ||||
|  | ||||
|         # Queryset select_related | ||||
| @@ -123,10 +259,10 @@ class FieldTest(unittest.TestCase): | ||||
|             self.assertEqual(q, 2) | ||||
|  | ||||
|             for employee in employees: | ||||
|                 self.assertEquals(employee.boss, bill) | ||||
|                 self.assertEqual(employee.boss, bill) | ||||
|                 self.assertEqual(q, 2) | ||||
|  | ||||
|                 self.assertEquals(employee.friends, friends) | ||||
|                 self.assertEqual(employee.friends, friends) | ||||
|                 self.assertEqual(q, 2) | ||||
|  | ||||
|     def test_circular_reference(self): | ||||
| @@ -160,7 +296,7 @@ class FieldTest(unittest.TestCase): | ||||
|         daughter.relations.append(self_rel) | ||||
|         daughter.save() | ||||
|  | ||||
|         self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects()) | ||||
|         self.assertEqual("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects()) | ||||
|  | ||||
|     def test_circular_reference_on_self(self): | ||||
|         """Ensure you can handle circular references | ||||
| @@ -186,7 +322,7 @@ class FieldTest(unittest.TestCase): | ||||
|         daughter.relations.append(daughter) | ||||
|         daughter.save() | ||||
|  | ||||
|         self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects()) | ||||
|         self.assertEqual("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects()) | ||||
|  | ||||
|     def test_circular_tree_reference(self): | ||||
|         """Ensure you can handle circular references with more than one level | ||||
| @@ -228,7 +364,7 @@ class FieldTest(unittest.TestCase): | ||||
|         anna.other.name = "Anna's friends" | ||||
|         anna.save() | ||||
|  | ||||
|         self.assertEquals( | ||||
|         self.assertEqual( | ||||
|             "[<Person: Paul>, <Person: Maria>, <Person: Julia>, <Person: Anna>]", | ||||
|             "%s" % Person.objects() | ||||
|         ) | ||||
| @@ -781,8 +917,8 @@ class FieldTest(unittest.TestCase): | ||||
|         root.save() | ||||
|  | ||||
|         root = root.reload() | ||||
|         self.assertEquals(root.children, [company]) | ||||
|         self.assertEquals(company.parents, [root]) | ||||
|         self.assertEqual(root.children, [company]) | ||||
|         self.assertEqual(company.parents, [root]) | ||||
|  | ||||
|     def test_dict_in_dbref_instance(self): | ||||
|  | ||||
| @@ -808,8 +944,8 @@ class FieldTest(unittest.TestCase): | ||||
|         room_101.save() | ||||
|  | ||||
|         room = Room.objects.first().select_related() | ||||
|         self.assertEquals(room.staffs_with_position[0]['staff'], sarah) | ||||
|         self.assertEquals(room.staffs_with_position[1]['staff'], bob) | ||||
|         self.assertEqual(room.staffs_with_position[0]['staff'], sarah) | ||||
|         self.assertEqual(room.staffs_with_position[1]['staff'], bob) | ||||
|  | ||||
|     def test_document_reload_no_inheritance(self): | ||||
|         class Foo(Document): | ||||
| @@ -839,8 +975,8 @@ class FieldTest(unittest.TestCase): | ||||
|         foo.save() | ||||
|         foo.reload() | ||||
|  | ||||
|         self.assertEquals(type(foo.bar), Bar) | ||||
|         self.assertEquals(type(foo.baz), Baz) | ||||
|         self.assertEqual(type(foo.bar), Bar) | ||||
|         self.assertEqual(type(foo.baz), Baz) | ||||
|  | ||||
|     def test_list_lookup_not_checked_in_map(self): | ||||
|         """Ensure we dereference list data correctly | ||||
| @@ -862,4 +998,58 @@ class FieldTest(unittest.TestCase): | ||||
|  | ||||
|         msg = Message.objects.get(id=1) | ||||
|         self.assertEqual(0, msg.comments[0].id) | ||||
|         self.assertEqual(1, msg.comments[1].id) | ||||
|         self.assertEqual(1, msg.comments[1].id) | ||||
|  | ||||
|     def test_tuples_as_tuples(self): | ||||
|         """ | ||||
|         Ensure that tuples remain tuples when they are | ||||
|         inside a ComplexBaseField | ||||
|         """ | ||||
|         from mongoengine.base import BaseField | ||||
|         class EnumField(BaseField): | ||||
|             def __init__(self, **kwargs): | ||||
|                 super(EnumField,self).__init__(**kwargs) | ||||
|  | ||||
|             def to_mongo(self, value): | ||||
|                 return value | ||||
|  | ||||
|             def to_python(self, value): | ||||
|                 return tuple(value) | ||||
|  | ||||
|         class TestDoc(Document): | ||||
|             items = ListField(EnumField()) | ||||
|  | ||||
|         TestDoc.drop_collection() | ||||
|         tuples = [(100, 'Testing')] | ||||
|         doc = TestDoc() | ||||
|         doc.items = tuples | ||||
|         doc.save() | ||||
|         x = TestDoc.objects().get() | ||||
|         self.assertTrue(x is not None) | ||||
|         self.assertTrue(len(x.items) == 1) | ||||
|         self.assertTrue(tuple(x.items[0]) in tuples) | ||||
|         self.assertTrue(x.items[0] in tuples) | ||||
|  | ||||
|     def test_non_ascii_pk(self): | ||||
|         """ | ||||
|         Ensure that dbref conversion to string does not fail when | ||||
|         non-ascii characters are used in primary key | ||||
|         """ | ||||
|         class Brand(Document): | ||||
|             title = StringField(max_length=255, primary_key=True) | ||||
|  | ||||
|         class BrandGroup(Document): | ||||
|             title = StringField(max_length=255, primary_key=True) | ||||
|             brands = ListField(ReferenceField("Brand", dbref=True)) | ||||
|  | ||||
|         Brand.drop_collection() | ||||
|         BrandGroup.drop_collection() | ||||
|  | ||||
|         brand1 = Brand(title="Moschino").save() | ||||
|         brand2 = Brand(title=u"Денис Симачёв").save() | ||||
|  | ||||
|         BrandGroup(title="top_brands", brands=[brand1, brand2]).save() | ||||
|         brand_groups = BrandGroup.objects().all() | ||||
|  | ||||
|         self.assertEqual(2, len([brand for bg in brand_groups for brand in bg.brands])) | ||||
|  | ||||
|   | ||||
| @@ -1,24 +1,34 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| from __future__ import with_statement | ||||
| import unittest | ||||
|  | ||||
| from nose.plugins.skip import SkipTest | ||||
| from mongoengine.python_support import PY3 | ||||
| from mongoengine import * | ||||
| from mongoengine.django.shortcuts import get_document_or_404 | ||||
|  | ||||
| from django.http import Http404 | ||||
| from django.template import Context, Template | ||||
| from django.conf import settings | ||||
| from django.core.paginator import Paginator | ||||
| try: | ||||
|     from mongoengine.django.shortcuts import get_document_or_404 | ||||
|  | ||||
| settings.configure() | ||||
|     from django.http import Http404 | ||||
|     from django.template import Context, Template | ||||
|     from django.conf import settings | ||||
|     from django.core.paginator import Paginator | ||||
|  | ||||
| from django.contrib.sessions.tests import SessionTestsMixin | ||||
| from mongoengine.django.sessions import SessionStore, MongoSession | ||||
|     settings.configure() | ||||
|  | ||||
|     from django.contrib.sessions.tests import SessionTestsMixin | ||||
|     from mongoengine.django.sessions import SessionStore, MongoSession | ||||
| except Exception, err: | ||||
|     if PY3: | ||||
|         SessionTestsMixin = type  # dummy value so no error | ||||
|         SessionStore = None  # dummy value so no error | ||||
|     else: | ||||
|         raise err | ||||
|  | ||||
|  | ||||
| class QuerySetTest(unittest.TestCase): | ||||
|  | ||||
|     def setUp(self): | ||||
|         if PY3: | ||||
|             raise SkipTest('django does not have Python 3 support') | ||||
|         connect(db='mongoenginetest') | ||||
|  | ||||
|         class Person(Document): | ||||
| @@ -99,6 +109,8 @@ class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): | ||||
|     backend = SessionStore | ||||
|  | ||||
|     def setUp(self): | ||||
|         if PY3: | ||||
|             raise SkipTest('django does not have Python 3 support') | ||||
|         connect(db='mongoenginetest') | ||||
|         MongoSession.drop_collection() | ||||
|         super(MongoDBSessionTest, self).setUp() | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -25,14 +25,14 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         p.name = "James" | ||||
|         p.age = 34 | ||||
|  | ||||
|         self.assertEquals(p.to_mongo(), | ||||
|         self.assertEqual(p.to_mongo(), | ||||
|             {"_types": ["Person"], "_cls": "Person", | ||||
|              "name": "James", "age": 34} | ||||
|         ) | ||||
|  | ||||
|         p.save() | ||||
|  | ||||
|         self.assertEquals(self.Person.objects.first().age, 34) | ||||
|         self.assertEqual(self.Person.objects.first().age, 34) | ||||
|  | ||||
|         # Confirm no changes to self.Person | ||||
|         self.assertFalse(hasattr(self.Person, 'age')) | ||||
| @@ -40,11 +40,11 @@ class DynamicDocTest(unittest.TestCase): | ||||
|     def test_dynamic_document_delta(self): | ||||
|         """Ensures simple dynamic documents can delta correctly""" | ||||
|         p = self.Person(name="James", age=34) | ||||
|         self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {})) | ||||
|         self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {})) | ||||
|  | ||||
|         p.doc = 123 | ||||
|         del(p.doc) | ||||
|         self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1})) | ||||
|         self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1})) | ||||
|  | ||||
|     def test_change_scope_of_variable(self): | ||||
|         """Test changing the scope of a dynamic field has no adverse effects""" | ||||
| @@ -58,7 +58,7 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         p.save() | ||||
|  | ||||
|         p = self.Person.objects.get() | ||||
|         self.assertEquals(p.misc, {'hello': 'world'}) | ||||
|         self.assertEqual(p.misc, {'hello': 'world'}) | ||||
|  | ||||
|     def test_delete_dynamic_field(self): | ||||
|         """Test deleting a dynamic field works""" | ||||
| @@ -73,10 +73,10 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         p.save() | ||||
|  | ||||
|         p = self.Person.objects.get() | ||||
|         self.assertEquals(p.misc, {'hello': 'world'}) | ||||
|         self.assertEqual(p.misc, {'hello': 'world'}) | ||||
|         collection = self.db[self.Person._get_collection_name()] | ||||
|         obj = collection.find_one() | ||||
|         self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name']) | ||||
|         self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name']) | ||||
|  | ||||
|         del(p.misc) | ||||
|         p.save() | ||||
| @@ -85,7 +85,7 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         self.assertFalse(hasattr(p, 'misc')) | ||||
|  | ||||
|         obj = collection.find_one() | ||||
|         self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name']) | ||||
|         self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'name']) | ||||
|  | ||||
|     def test_dynamic_document_queries(self): | ||||
|         """Ensure we can query dynamic fields""" | ||||
| @@ -94,10 +94,10 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         p.age = 22 | ||||
|         p.save() | ||||
|  | ||||
|         self.assertEquals(1, self.Person.objects(age=22).count()) | ||||
|         self.assertEqual(1, self.Person.objects(age=22).count()) | ||||
|         p = self.Person.objects(age=22) | ||||
|         p = p.get() | ||||
|         self.assertEquals(22, p.age) | ||||
|         self.assertEqual(22, p.age) | ||||
|  | ||||
|     def test_complex_dynamic_document_queries(self): | ||||
|         class Person(DynamicDocument): | ||||
| @@ -117,8 +117,8 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         p2.age = 10 | ||||
|         p2.save() | ||||
|  | ||||
|         self.assertEquals(Person.objects(age__icontains='ten').count(), 2) | ||||
|         self.assertEquals(Person.objects(age__gte=10).count(), 1) | ||||
|         self.assertEqual(Person.objects(age__icontains='ten').count(), 2) | ||||
|         self.assertEqual(Person.objects(age__gte=10).count(), 1) | ||||
|  | ||||
|     def test_complex_data_lookups(self): | ||||
|         """Ensure you can query dynamic document dynamic fields""" | ||||
| @@ -126,7 +126,7 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         p.misc = {'hello': 'world'} | ||||
|         p.save() | ||||
|  | ||||
|         self.assertEquals(1, self.Person.objects(misc__hello='world').count()) | ||||
|         self.assertEqual(1, self.Person.objects(misc__hello='world').count()) | ||||
|  | ||||
|     def test_inheritance(self): | ||||
|         """Ensure that dynamic document plays nice with inheritance""" | ||||
| @@ -146,8 +146,8 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         joe_bloggs.age = 20 | ||||
|         joe_bloggs.save() | ||||
|  | ||||
|         self.assertEquals(1, self.Person.objects(age=20).count()) | ||||
|         self.assertEquals(1, Employee.objects(age=20).count()) | ||||
|         self.assertEqual(1, self.Person.objects(age=20).count()) | ||||
|         self.assertEqual(1, Employee.objects(age=20).count()) | ||||
|  | ||||
|         joe_bloggs = self.Person.objects.first() | ||||
|         self.assertTrue(isinstance(joe_bloggs, Employee)) | ||||
| @@ -170,7 +170,7 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         embedded_1.list_field = ['1', 2, {'hello': 'world'}] | ||||
|         doc.embedded_field = embedded_1 | ||||
|  | ||||
|         self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", | ||||
|         self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", | ||||
|             "embedded_field": { | ||||
|                 "_types": ['Embedded'], "_cls": "Embedded", | ||||
|                 "string_field": "hello", | ||||
| @@ -182,11 +182,11 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         doc.save() | ||||
|  | ||||
|         doc = Doc.objects.first() | ||||
|         self.assertEquals(doc.embedded_field.__class__, Embedded) | ||||
|         self.assertEquals(doc.embedded_field.string_field, "hello") | ||||
|         self.assertEquals(doc.embedded_field.int_field, 1) | ||||
|         self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'}) | ||||
|         self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}]) | ||||
|         self.assertEqual(doc.embedded_field.__class__, Embedded) | ||||
|         self.assertEqual(doc.embedded_field.string_field, "hello") | ||||
|         self.assertEqual(doc.embedded_field.int_field, 1) | ||||
|         self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) | ||||
|         self.assertEqual(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}]) | ||||
|  | ||||
|     def test_complex_embedded_documents(self): | ||||
|         """Test complex dynamic embedded documents setups""" | ||||
| @@ -213,7 +213,7 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         embedded_1.list_field = ['1', 2, embedded_2] | ||||
|         doc.embedded_field = embedded_1 | ||||
|  | ||||
|         self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", | ||||
|         self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", | ||||
|             "embedded_field": { | ||||
|                 "_types": ['Embedded'], "_cls": "Embedded", | ||||
|                 "string_field": "hello", | ||||
| @@ -230,20 +230,20 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         }) | ||||
|         doc.save() | ||||
|         doc = Doc.objects.first() | ||||
|         self.assertEquals(doc.embedded_field.__class__, Embedded) | ||||
|         self.assertEquals(doc.embedded_field.string_field, "hello") | ||||
|         self.assertEquals(doc.embedded_field.int_field, 1) | ||||
|         self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'}) | ||||
|         self.assertEquals(doc.embedded_field.list_field[0], '1') | ||||
|         self.assertEquals(doc.embedded_field.list_field[1], 2) | ||||
|         self.assertEqual(doc.embedded_field.__class__, Embedded) | ||||
|         self.assertEqual(doc.embedded_field.string_field, "hello") | ||||
|         self.assertEqual(doc.embedded_field.int_field, 1) | ||||
|         self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) | ||||
|         self.assertEqual(doc.embedded_field.list_field[0], '1') | ||||
|         self.assertEqual(doc.embedded_field.list_field[1], 2) | ||||
|  | ||||
|         embedded_field = doc.embedded_field.list_field[2] | ||||
|  | ||||
|         self.assertEquals(embedded_field.__class__, Embedded) | ||||
|         self.assertEquals(embedded_field.string_field, "hello") | ||||
|         self.assertEquals(embedded_field.int_field, 1) | ||||
|         self.assertEquals(embedded_field.dict_field, {'hello': 'world'}) | ||||
|         self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}]) | ||||
|         self.assertEqual(embedded_field.__class__, Embedded) | ||||
|         self.assertEqual(embedded_field.string_field, "hello") | ||||
|         self.assertEqual(embedded_field.int_field, 1) | ||||
|         self.assertEqual(embedded_field.dict_field, {'hello': 'world'}) | ||||
|         self.assertEqual(embedded_field.list_field, ['1', 2, {'hello': 'world'}]) | ||||
|  | ||||
|     def test_delta_for_dynamic_documents(self): | ||||
|         p = self.Person() | ||||
| @@ -252,18 +252,18 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         p.save() | ||||
|  | ||||
|         p.age = 24 | ||||
|         self.assertEquals(p.age, 24) | ||||
|         self.assertEquals(p._get_changed_fields(), ['age']) | ||||
|         self.assertEquals(p._delta(), ({'age': 24}, {})) | ||||
|         self.assertEqual(p.age, 24) | ||||
|         self.assertEqual(p._get_changed_fields(), ['age']) | ||||
|         self.assertEqual(p._delta(), ({'age': 24}, {})) | ||||
|  | ||||
|         p = self.Person.objects(age=22).get() | ||||
|         p.age = 24 | ||||
|         self.assertEquals(p.age, 24) | ||||
|         self.assertEquals(p._get_changed_fields(), ['age']) | ||||
|         self.assertEquals(p._delta(), ({'age': 24}, {})) | ||||
|         self.assertEqual(p.age, 24) | ||||
|         self.assertEqual(p._get_changed_fields(), ['age']) | ||||
|         self.assertEqual(p._delta(), ({'age': 24}, {})) | ||||
|  | ||||
|         p.save() | ||||
|         self.assertEquals(1, self.Person.objects(age=24).count()) | ||||
|         self.assertEqual(1, self.Person.objects(age=24).count()) | ||||
|  | ||||
|     def test_delta(self): | ||||
|  | ||||
| @@ -275,40 +275,40 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         doc.save() | ||||
|  | ||||
|         doc = Doc.objects.first() | ||||
|         self.assertEquals(doc._get_changed_fields(), []) | ||||
|         self.assertEquals(doc._delta(), ({}, {})) | ||||
|         self.assertEqual(doc._get_changed_fields(), []) | ||||
|         self.assertEqual(doc._delta(), ({}, {})) | ||||
|  | ||||
|         doc.string_field = 'hello' | ||||
|         self.assertEquals(doc._get_changed_fields(), ['string_field']) | ||||
|         self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {})) | ||||
|         self.assertEqual(doc._get_changed_fields(), ['string_field']) | ||||
|         self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {})) | ||||
|  | ||||
|         doc._changed_fields = [] | ||||
|         doc.int_field = 1 | ||||
|         self.assertEquals(doc._get_changed_fields(), ['int_field']) | ||||
|         self.assertEquals(doc._delta(), ({'int_field': 1}, {})) | ||||
|         self.assertEqual(doc._get_changed_fields(), ['int_field']) | ||||
|         self.assertEqual(doc._delta(), ({'int_field': 1}, {})) | ||||
|  | ||||
|         doc._changed_fields = [] | ||||
|         dict_value = {'hello': 'world', 'ping': 'pong'} | ||||
|         doc.dict_field = dict_value | ||||
|         self.assertEquals(doc._get_changed_fields(), ['dict_field']) | ||||
|         self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {})) | ||||
|         self.assertEqual(doc._get_changed_fields(), ['dict_field']) | ||||
|         self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {})) | ||||
|  | ||||
|         doc._changed_fields = [] | ||||
|         list_value = ['1', 2, {'hello': 'world'}] | ||||
|         doc.list_field = list_value | ||||
|         self.assertEquals(doc._get_changed_fields(), ['list_field']) | ||||
|         self.assertEquals(doc._delta(), ({'list_field': list_value}, {})) | ||||
|         self.assertEqual(doc._get_changed_fields(), ['list_field']) | ||||
|         self.assertEqual(doc._delta(), ({'list_field': list_value}, {})) | ||||
|  | ||||
|         # Test unsetting | ||||
|         doc._changed_fields = [] | ||||
|         doc.dict_field = {} | ||||
|         self.assertEquals(doc._get_changed_fields(), ['dict_field']) | ||||
|         self.assertEquals(doc._delta(), ({}, {'dict_field': 1})) | ||||
|         self.assertEqual(doc._get_changed_fields(), ['dict_field']) | ||||
|         self.assertEqual(doc._delta(), ({}, {'dict_field': 1})) | ||||
|  | ||||
|         doc._changed_fields = [] | ||||
|         doc.list_field = [] | ||||
|         self.assertEquals(doc._get_changed_fields(), ['list_field']) | ||||
|         self.assertEquals(doc._delta(), ({}, {'list_field': 1})) | ||||
|         self.assertEqual(doc._get_changed_fields(), ['list_field']) | ||||
|         self.assertEqual(doc._delta(), ({}, {'list_field': 1})) | ||||
|  | ||||
|     def test_delta_recursive(self): | ||||
|         """Testing deltaing works with dynamic documents""" | ||||
| @@ -323,8 +323,8 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         doc.save() | ||||
|  | ||||
|         doc = Doc.objects.first() | ||||
|         self.assertEquals(doc._get_changed_fields(), []) | ||||
|         self.assertEquals(doc._delta(), ({}, {})) | ||||
|         self.assertEqual(doc._get_changed_fields(), []) | ||||
|         self.assertEqual(doc._delta(), ({}, {})) | ||||
|  | ||||
|         embedded_1 = Embedded() | ||||
|         embedded_1.string_field = 'hello' | ||||
| @@ -333,7 +333,7 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         embedded_1.list_field = ['1', 2, {'hello': 'world'}] | ||||
|         doc.embedded_field = embedded_1 | ||||
|  | ||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field']) | ||||
|         self.assertEqual(doc._get_changed_fields(), ['embedded_field']) | ||||
|  | ||||
|         embedded_delta = { | ||||
|             'string_field': 'hello', | ||||
| @@ -341,28 +341,28 @@ class DynamicDocTest(unittest.TestCase): | ||||
|             'dict_field': {'hello': 'world'}, | ||||
|             'list_field': ['1', 2, {'hello': 'world'}] | ||||
|         } | ||||
|         self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {})) | ||||
|         self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) | ||||
|         embedded_delta.update({ | ||||
|             '_types': ['Embedded'], | ||||
|             '_cls': 'Embedded', | ||||
|         }) | ||||
|         self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {})) | ||||
|         self.assertEqual(doc._delta(), ({'embedded_field': embedded_delta}, {})) | ||||
|  | ||||
|         doc.save() | ||||
|         doc.reload() | ||||
|  | ||||
|         doc.embedded_field.dict_field = {} | ||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field']) | ||||
|         self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1})) | ||||
|         self.assertEqual(doc._get_changed_fields(), ['embedded_field.dict_field']) | ||||
|         self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1})) | ||||
|  | ||||
|         self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1})) | ||||
|         self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1})) | ||||
|         doc.save() | ||||
|         doc.reload() | ||||
|  | ||||
|         doc.embedded_field.list_field = [] | ||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) | ||||
|         self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1})) | ||||
|         self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1})) | ||||
|         self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field']) | ||||
|         self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1})) | ||||
|         self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1})) | ||||
|         doc.save() | ||||
|         doc.reload() | ||||
|  | ||||
| @@ -373,8 +373,8 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         embedded_2.list_field = ['1', 2, {'hello': 'world'}] | ||||
|  | ||||
|         doc.embedded_field.list_field = ['1', 2, embedded_2] | ||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) | ||||
|         self.assertEquals(doc.embedded_field._delta(), ({ | ||||
|         self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field']) | ||||
|         self.assertEqual(doc.embedded_field._delta(), ({ | ||||
|             'list_field': ['1', 2, { | ||||
|                 '_cls': 'Embedded', | ||||
|                 '_types': ['Embedded'], | ||||
| @@ -385,7 +385,7 @@ class DynamicDocTest(unittest.TestCase): | ||||
|             }] | ||||
|         }, {})) | ||||
|  | ||||
|         self.assertEquals(doc._delta(), ({ | ||||
|         self.assertEqual(doc._delta(), ({ | ||||
|             'embedded_field.list_field': ['1', 2, { | ||||
|                 '_cls': 'Embedded', | ||||
|                  '_types': ['Embedded'], | ||||
| @@ -398,25 +398,25 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         doc.save() | ||||
|         doc.reload() | ||||
|  | ||||
|         self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, []) | ||||
|         self.assertEquals(doc.embedded_field.list_field[0], '1') | ||||
|         self.assertEquals(doc.embedded_field.list_field[1], 2) | ||||
|         self.assertEqual(doc.embedded_field.list_field[2]._changed_fields, []) | ||||
|         self.assertEqual(doc.embedded_field.list_field[0], '1') | ||||
|         self.assertEqual(doc.embedded_field.list_field[1], 2) | ||||
|         for k in doc.embedded_field.list_field[2]._fields: | ||||
|             self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k]) | ||||
|             self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k]) | ||||
|  | ||||
|         doc.embedded_field.list_field[2].string_field = 'world' | ||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field']) | ||||
|         self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {})) | ||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {})) | ||||
|         self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field']) | ||||
|         self.assertEqual(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {})) | ||||
|         self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {})) | ||||
|         doc.save() | ||||
|         doc.reload() | ||||
|         self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world') | ||||
|         self.assertEqual(doc.embedded_field.list_field[2].string_field, 'world') | ||||
|  | ||||
|         # Test multiple assignments | ||||
|         doc.embedded_field.list_field[2].string_field = 'hello world' | ||||
|         doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] | ||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) | ||||
|         self.assertEquals(doc.embedded_field._delta(), ({ | ||||
|         self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field']) | ||||
|         self.assertEqual(doc.embedded_field._delta(), ({ | ||||
|             'list_field': ['1', 2, { | ||||
|             '_types': ['Embedded'], | ||||
|             '_cls': 'Embedded', | ||||
| @@ -424,7 +424,7 @@ class DynamicDocTest(unittest.TestCase): | ||||
|             'int_field': 1, | ||||
|             'list_field': ['1', 2, {'hello': 'world'}], | ||||
|             'dict_field': {'hello': 'world'}}]}, {})) | ||||
|         self.assertEquals(doc._delta(), ({ | ||||
|         self.assertEqual(doc._delta(), ({ | ||||
|             'embedded_field.list_field': ['1', 2, { | ||||
|                 '_types': ['Embedded'], | ||||
|                 '_cls': 'Embedded', | ||||
| @@ -435,32 +435,32 @@ class DynamicDocTest(unittest.TestCase): | ||||
|             ]}, {})) | ||||
|         doc.save() | ||||
|         doc.reload() | ||||
|         self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world') | ||||
|         self.assertEqual(doc.embedded_field.list_field[2].string_field, 'hello world') | ||||
|  | ||||
|         # Test list native methods | ||||
|         doc.embedded_field.list_field[2].list_field.pop(0) | ||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {})) | ||||
|         self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {})) | ||||
|         doc.save() | ||||
|         doc.reload() | ||||
|  | ||||
|         doc.embedded_field.list_field[2].list_field.append(1) | ||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {})) | ||||
|         self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {})) | ||||
|         doc.save() | ||||
|         doc.reload() | ||||
|         self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1]) | ||||
|         self.assertEqual(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1]) | ||||
|  | ||||
|         doc.embedded_field.list_field[2].list_field.sort() | ||||
|         doc.embedded_field.list_field[2].list_field.sort(key=str)# use str as a key to allow comparing uncomperable types | ||||
|         doc.save() | ||||
|         doc.reload() | ||||
|         self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}]) | ||||
|         self.assertEqual(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}]) | ||||
|  | ||||
|         del(doc.embedded_field.list_field[2].list_field[2]['hello']) | ||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {})) | ||||
|         self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {})) | ||||
|         doc.save() | ||||
|         doc.reload() | ||||
|  | ||||
|         del(doc.embedded_field.list_field[2].list_field) | ||||
|         self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1})) | ||||
|         self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1})) | ||||
|  | ||||
|         doc.save() | ||||
|         doc.reload() | ||||
| @@ -470,8 +470,8 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         doc.reload() | ||||
|  | ||||
|         doc.dict_field['embedded'].string_field = 'Hello World' | ||||
|         self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field']) | ||||
|         self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {})) | ||||
|         self.assertEqual(doc._get_changed_fields(), ['dict_field.embedded.string_field']) | ||||
|         self.assertEqual(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {})) | ||||
|  | ||||
|     def test_indexes(self): | ||||
|         """Ensure that indexes are used when meta[indexes] is specified. | ||||
| @@ -500,3 +500,34 @@ class DynamicDocTest(unittest.TestCase): | ||||
|         self.assertTrue([('_types', 1), ('category', 1), ('date', -1)] | ||||
|                         in info) | ||||
|         self.assertTrue([('_types', 1), ('date', -1)] in info) | ||||
|  | ||||
|     def test_dynamic_and_embedded(self): | ||||
|         """Ensure embedded documents play nicely""" | ||||
|  | ||||
|         class Address(EmbeddedDocument): | ||||
|             city = StringField() | ||||
|  | ||||
|         class Person(DynamicDocument): | ||||
|             name = StringField() | ||||
|             meta = {'allow_inheritance': True} | ||||
|  | ||||
|         Person.drop_collection() | ||||
|  | ||||
|         Person(name="Ross", address=Address(city="London")).save() | ||||
|  | ||||
|         person = Person.objects.first() | ||||
|         person.address.city = "Lundenne" | ||||
|         person.save() | ||||
|  | ||||
|         self.assertEqual(Person.objects.first().address.city, "Lundenne") | ||||
|  | ||||
|         person = Person.objects.first() | ||||
|         person.address = Address(city="Londinium") | ||||
|         person.save() | ||||
|  | ||||
|         self.assertEqual(Person.objects.first().address.city, "Londinium") | ||||
|  | ||||
|         person = Person.objects.first() | ||||
|         person.age = 35 | ||||
|         person.save() | ||||
|         self.assertEqual(Person.objects.first().age, 35) | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,4 +1,5 @@ | ||||
| import unittest | ||||
|  | ||||
| import pymongo | ||||
| from pymongo import ReadPreference, ReplicaSetConnection | ||||
|  | ||||
| @@ -26,7 +27,7 @@ class ConnectionTest(unittest.TestCase): | ||||
|         if not isinstance(conn, ReplicaSetConnection): | ||||
|             return | ||||
|  | ||||
|         self.assertEquals(conn.read_preference, ReadPreference.SECONDARY_ONLY) | ||||
|         self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_ONLY) | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
|   | ||||
| @@ -212,9 +212,9 @@ class SignalTests(unittest.TestCase): | ||||
|  | ||||
|         # The output of this signal is not entirely deterministic. The reloaded | ||||
|         # object will have an object ID. Hence, we only check part of the output | ||||
|         self.assertEquals(signal_output[3], | ||||
|         self.assertEqual(signal_output[3], | ||||
|             "pre_bulk_insert signal, [<Author: Bill Shakespeare>]") | ||||
|         self.assertEquals(signal_output[-2:], | ||||
|         self.assertEqual(signal_output[-2:], | ||||
|             ["post_bulk_insert signal, [<Author: Bill Shakespeare>]", | ||||
|              "Is loaded",]) | ||||
|  | ||||
|   | ||||
		Reference in New Issue
	
	Block a user