Compare commits
	
		
			193 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 31ec7907b5 | ||
|  | 12f3f8c694 | ||
|  | 79098e997e | ||
|  | dc1849bad5 | ||
|  | e2d826c412 | ||
|  | e6d796832e | ||
|  | 6f0a6df4f6 | ||
|  | 7a877a00d5 | ||
|  | e8604d100e | ||
|  | 1647441ce8 | ||
|  | 9f8d6b3a00 | ||
|  | 4b2ad25405 | ||
|  | 3ce163b1a0 | ||
|  | 7c1ee28f13 | ||
|  | 2645e43da1 | ||
|  | 59bfe551a3 | ||
|  | e2c78047b1 | ||
|  | 6a4351e44f | ||
|  | adb60ef1ac | ||
|  | 3090adac04 | ||
|  | b9253d86cc | ||
|  | ab4d4e6230 | ||
|  | 7cd38c56c6 | ||
|  | 864053615b | ||
|  | db2366f112 | ||
|  | 4defc82192 | ||
|  | 5949970a95 | ||
|  | 0ea4abda81 | ||
|  | 5c6035d636 | ||
|  | a2183e3dcc | ||
|  | 99637151b5 | ||
|  | a8e787c120 | ||
|  | 53339c7c72 | ||
|  | 3534bf7d70 | ||
|  | 1cf3989664 | ||
|  | fd296918da | ||
|  | 8ad1f03dc5 | ||
|  | fe7e17dbd5 | ||
|  | d582394a42 | ||
|  | 02ef0df019 | ||
|  | 0dfd6aa518 | ||
|  | 0b23bc9cf2 | ||
|  | f108c4288e | ||
|  | 9b9696aefd | ||
|  | 576e198ece | ||
|  | 52f85aab18 | ||
|  | ab60fd0490 | ||
|  | d79ae30f31 | ||
|  | f27debe7f9 | ||
|  | 735e043ff6 | ||
|  | 6e7f2b73cf | ||
|  | d645ce9745 | ||
|  | 7c08c140da | ||
|  | 81d402dc17 | ||
|  | 966fa12358 | ||
|  | 87792e1921 | ||
|  | 4c8296acc6 | ||
|  | 9989da07ed | ||
|  | 1c5e6a3425 | ||
|  | eedf908770 | ||
|  | 5c9ef41403 | ||
|  | 0bf2ad5b67 | ||
|  | a0e3f382cd | ||
|  | f09c39b5d7 | ||
|  | 89c67bf259 | ||
|  | ea666d4607 | ||
|  | b8af154439 | ||
|  | f594ece32a | ||
|  | 03beb6852a | ||
|  | ab9e9a3329 | ||
|  | a4b09344af | ||
|  | 8cb8aa392c | ||
|  | 3255519792 | ||
|  | 7e64bb2503 | ||
|  | 86a78402c3 | ||
|  | ba276452fb | ||
|  | 4ffa8d0124 | ||
|  | 4bc5082681 | ||
|  | 0e3c34e1da | ||
|  | 658b3784ae | ||
|  | 0526f577ff | ||
|  | bb1b9bc1d3 | ||
|  | b1eeb77ddc | ||
|  | 999d4a7676 | ||
|  | 1b80193aac | ||
|  | be8d39a48c | ||
|  | a2f3d70f28 | ||
|  | 676a7bf712 | ||
|  | e990a6c70c | ||
|  | 90fa0f6c4a | ||
|  | 22010d7d95 | ||
|  | 66279bd90f | ||
|  | 19da228855 | ||
|  | 9e67941bad | ||
|  | 0454fc74e9 | ||
|  | 2f6b1c7611 | ||
|  | f00bed6058 | ||
|  | 529c522594 | ||
|  | 2bb9493fcf | ||
|  | 839ed8a64a | ||
|  | 017a31ffd0 | ||
|  | 83b961c84d | ||
|  | fa07423ca5 | ||
|  | dd4af2df81 | ||
|  | 44bd8cb85b | ||
|  | 52d80ac23c | ||
|  | 43a5d73e14 | ||
|  | abc764951d | ||
|  | 9cc6164026 | ||
|  | 475488b9f2 | ||
|  | 95b1783834 | ||
|  | 12c8b5c0b9 | ||
|  | f99b7a811b | ||
|  | 0575abab23 | ||
|  | 9eebcf7beb | ||
|  | ed74477150 | ||
|  | 2801b38c75 | ||
|  | dc3fea875e | ||
|  | aab8c2b687 | ||
|  | 3577773af3 | ||
|  | dd023edc0f | ||
|  | 8ac9e6dc19 | ||
|  | f45d4d781d | ||
|  | c95652d6a8 | ||
|  | 97b37f75d3 | ||
|  | 95dae48778 | ||
|  | 73635033bd | ||
|  | c1619d2a62 | ||
|  | b87ef982f6 | ||
|  | 91aa90ad4a | ||
|  | 4b3cea9e78 | ||
|  | 2420b5e937 | ||
|  | f23a976bea | ||
|  | 4226cd08f1 | ||
|  | 7a230f1693 | ||
|  | a43d0d4612 | ||
|  | 78a40a0c70 | ||
|  | 2c69d8f0b0 | ||
|  | 0018c38b83 | ||
|  | 8df81571fc | ||
|  | d1add62a06 | ||
|  | c419f3379a | ||
|  | 69d57209f7 | ||
|  | 7ca81d6fb8 | ||
|  | 8a046bfa5d | ||
|  | 3628a7653c | ||
|  | 48f988acd7 | ||
|  | 6526923345 | ||
|  | 24fd1acce6 | ||
|  | cbb9235dc5 | ||
|  | 19ec2c9bc9 | ||
|  | 6459d4c0b6 | ||
|  | 1304f2721f | ||
|  | 8bde0c0e53 | ||
|  | 598ffd3e5c | ||
|  | 1a4533a9cf | ||
|  | 601f0eb168 | ||
|  | 3070e0bf5d | ||
|  | 83c11a9834 | ||
|  | 5c912b930e | ||
|  | 1b17fb0ae7 | ||
|  | d83e67c121 | ||
|  | ae39ed94c9 | ||
|  | 1e51180d42 | ||
|  | 87ba69d02e | ||
|  | 8879d5560b | ||
|  | c1621ee39c | ||
|  | b0aa98edb4 | ||
|  | a7a2fe0216 | ||
|  | 8e50f5fa3c | ||
|  | 31793520bf | ||
|  | 0b6b0368c5 | ||
|  | d1d30a9280 | ||
|  | 420c6f2d1e | ||
|  | 34f06c4971 | ||
|  | 9cc4bbd49d | ||
|  | f66b312869 | ||
|  | 2405ba8708 | ||
|  | a91b6bff8b | ||
|  | 450dc11a68 | ||
|  | 1ce2f84ce5 | ||
|  | f55b241cfa | ||
|  | 34d08ce8ef | ||
|  | 4f5aa8c43b | ||
|  | 27b375060d | ||
|  | cbfdc401f7 | ||
|  | b58bf3e0ce | ||
|  | 1fff7e9aca | ||
|  | 494b981b13 | ||
|  | dd93995bd0 | ||
|  | b3bb4add9c | ||
|  | d305e71c27 | ||
|  | 0d92baa670 | 
							
								
								
									
										23
									
								
								.travis.yml
									
									
									
									
									
								
							
							
						
						
									
										23
									
								
								.travis.yml
									
									
									
									
									
								
							| @@ -1,12 +1,29 @@ | |||||||
| # http://travis-ci.org/#!/MongoEngine/mongoengine | # http://travis-ci.org/#!/MongoEngine/mongoengine | ||||||
| language: python | language: python | ||||||
|  | services: mongodb | ||||||
| python: | python: | ||||||
|  |     - 2.5 | ||||||
|     - 2.6 |     - 2.6 | ||||||
|     - 2.7 |     - 2.7 | ||||||
|  |     - 3.1 | ||||||
|  |     - 3.2 | ||||||
|  | env: | ||||||
|  |   - PYMONGO=dev | ||||||
|  |   - PYMONGO=2.3 | ||||||
|  |   - PYMONGO=2.2 | ||||||
| install: | install: | ||||||
|     - sudo apt-get install zlib1g zlib1g-dev |     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo apt-get install zlib1g zlib1g-dev; fi | ||||||
|     - sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/ |     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/; fi | ||||||
|     - pip install PIL --use-mirrors ; true |     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi | ||||||
|  |     - if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then pip install PIL --use-mirrors ; true; fi | ||||||
|  |     - if [[ $PYMONGO == 'dev' ]]; then pip install https://github.com/mongodb/mongo-python-driver/tarball/master; true; fi | ||||||
|  |     - if [[ $PYMONGO != 'dev' ]]; then pip install pymongo==$PYMONGO --use-mirrors; true; fi | ||||||
|     - python setup.py install |     - python setup.py install | ||||||
| script: | script: | ||||||
|     - python setup.py test |     - python setup.py test | ||||||
|  | notifications: | ||||||
|  |   irc: "irc.freenode.org#mongoengine" | ||||||
|  | branches: | ||||||
|  |   only: | ||||||
|  |     - master | ||||||
|  |     - 0.7 | ||||||
							
								
								
									
										14
									
								
								AUTHORS
									
									
									
									
									
								
							
							
						
						
									
										14
									
								
								AUTHORS
									
									
									
									
									
								
							| @@ -8,6 +8,7 @@ Florian Schlachter <flori@n-schlachter.de> | |||||||
| Steve Challis <steve@stevechallis.com> | Steve Challis <steve@stevechallis.com> | ||||||
| Wilson Júnior <wilsonpjunior@gmail.com> | Wilson Júnior <wilsonpjunior@gmail.com> | ||||||
| Dan Crosta https://github.com/dcrosta | Dan Crosta https://github.com/dcrosta | ||||||
|  | Laine Herron https://github.com/LaineHerron | ||||||
|  |  | ||||||
| CONTRIBUTORS | CONTRIBUTORS | ||||||
|  |  | ||||||
| @@ -110,3 +111,16 @@ that much better: | |||||||
|  * Andrey Fedoseev |  * Andrey Fedoseev | ||||||
|  * aparajita |  * aparajita | ||||||
|  * Tristan Escalada |  * Tristan Escalada | ||||||
|  |  * Alexander Koshelev | ||||||
|  |  * Jaime Irurzun | ||||||
|  |  * Alexandre González | ||||||
|  |  * Thomas Steinacher | ||||||
|  |  * Tommi Komulainen | ||||||
|  |  * Peter Landry | ||||||
|  |  * biszkoptwielki | ||||||
|  |  * Anton Kolechkin | ||||||
|  |  * Sergey Nikitin | ||||||
|  |  * psychogenic | ||||||
|  |  * Stefan Wójcik | ||||||
|  |  * dimonb | ||||||
|  |  * Garry Polley | ||||||
|   | |||||||
							
								
								
									
										61
									
								
								CONTRIBUTING.rst
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										61
									
								
								CONTRIBUTING.rst
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,61 @@ | |||||||
|  | Contributing to MongoEngine | ||||||
|  | =========================== | ||||||
|  |  | ||||||
|  | MongoEngine has a large `community | ||||||
|  | <https://raw.github.com/MongoEngine/mongoengine/master/AUTHORS>`_ and | ||||||
|  | contributions are always encouraged. Contributions can be as simple as | ||||||
|  | minor tweaks to the documentation. Please read these guidelines before | ||||||
|  | sending a pull request. | ||||||
|  |  | ||||||
|  | Bugfixes and New Features | ||||||
|  | ------------------------- | ||||||
|  |  | ||||||
|  | Before starting to write code, look for existing `tickets | ||||||
|  | <https://github.com/MongoEngine/mongoengine/issues?state=open>`_ or `create one | ||||||
|  | <https://github.com/MongoEngine/mongoengine/issues>`_ for your specific | ||||||
|  | issue or feature request. That way you avoid working on something | ||||||
|  | that might not be of interest or that has already been addressed.  If in doubt | ||||||
|  | post to the `user group <http://groups.google.com/group/mongoengine-users>`_. | ||||||
|  |  | ||||||
|  | Supported Interpreters | ||||||
|  | ---------------------- | ||||||
|  |  | ||||||
|  | MongoEngine supports CPython 2.5 and newer. Language | ||||||
|  | features not supported by all interpreters can not be used. | ||||||
|  | Please also ensure that your code is properly converted by | ||||||
|  | `2to3 <http://docs.python.org/library/2to3.html>`_ for Python 3 support. | ||||||
|  |  | ||||||
|  | Style Guide | ||||||
|  | ----------- | ||||||
|  |  | ||||||
|  | MongoEngine aims to follow `PEP8 <http://www.python.org/dev/peps/pep-0008/>`_ | ||||||
|  | including 4 space indents and 79 character line limits. | ||||||
|  |  | ||||||
|  | Testing | ||||||
|  | ------- | ||||||
|  |  | ||||||
|  | All tests are run on `Travis <http://travis-ci.org/MongoEngine/mongoengine>`_ | ||||||
|  | and any pull requests are automatically tested by Travis. Any pull requests | ||||||
|  | without tests will take longer to be integrated and might be refused. | ||||||
|  |  | ||||||
|  | General Guidelines | ||||||
|  | ------------------ | ||||||
|  |  | ||||||
|  | - Avoid backward breaking changes if at all possible. | ||||||
|  | - Write inline documentation for new classes and methods. | ||||||
|  | - Write tests and make sure they pass (make sure you have a mongod | ||||||
|  |   running on the default port, then execute ``python setup.py test`` | ||||||
|  |   from the cmd line to run the test suite). | ||||||
|  | - Add yourself to AUTHORS.rst :) | ||||||
|  |  | ||||||
|  | Documentation | ||||||
|  | ------------- | ||||||
|  |  | ||||||
|  | To contribute to the `API documentation | ||||||
|  | <http://docs.mongoengine.org/en/latest/apireference.html>`_ | ||||||
|  | just make your changes to the inline documentation of the appropriate | ||||||
|  | `source code <https://github.com/MongoEngine/mongoengine>`_ or `rst file | ||||||
|  | <https://github.com/MongoEngine/mongoengine/tree/master/docs>`_ in a | ||||||
|  | branch and submit a `pull request <https://help.github.com/articles/using-pull-requests>`_. | ||||||
|  | You might also use the github `Edit <https://github.com/blog/844-forking-with-the-edit-button>`_ | ||||||
|  | button. | ||||||
							
								
								
									
										2
									
								
								LICENSE
									
									
									
									
									
								
							
							
						
						
									
										2
									
								
								LICENSE
									
									
									
									
									
								
							| @@ -1,4 +1,4 @@ | |||||||
| Copyright (c) 2009-2010 Harry Marr | Copyright (c) 2009-2012 See AUTHORS | ||||||
|  |  | ||||||
| Permission is hereby granted, free of charge, to any person | Permission is hereby granted, free of charge, to any person | ||||||
| obtaining a copy of this software and associated documentation | obtaining a copy of this software and associated documentation | ||||||
|   | |||||||
							
								
								
									
										15
									
								
								README.rst
									
									
									
									
									
								
							
							
						
						
									
										15
									
								
								README.rst
									
									
									
									
									
								
							| @@ -2,6 +2,7 @@ | |||||||
| MongoEngine | MongoEngine | ||||||
| =========== | =========== | ||||||
| :Info: MongoEngine is an ORM-like layer on top of PyMongo. | :Info: MongoEngine is an ORM-like layer on top of PyMongo. | ||||||
|  | :Repository: https://github.com/MongoEngine/mongoengine | ||||||
| :Author: Harry Marr (http://github.com/hmarr) | :Author: Harry Marr (http://github.com/hmarr) | ||||||
| :Maintainer: Ross Lawley (http://github.com/rozza) | :Maintainer: Ross Lawley (http://github.com/rozza) | ||||||
|  |  | ||||||
| @@ -62,11 +63,6 @@ Some simple examples of what MongoEngine code looks like:: | |||||||
|     ...         print 'Link:', post.url |     ...         print 'Link:', post.url | ||||||
|     ...     print |     ...     print | ||||||
|     ... |     ... | ||||||
|     === Using MongoEngine === |  | ||||||
|     See the tutorial |  | ||||||
|  |  | ||||||
|     === MongoEngine Docs === |  | ||||||
|     Link: hmarr.com/mongoengine |  | ||||||
|  |  | ||||||
|     >>> len(BlogPost.objects) |     >>> len(BlogPost.objects) | ||||||
|     2 |     2 | ||||||
| @@ -84,7 +80,7 @@ Some simple examples of what MongoEngine code looks like:: | |||||||
| Tests | Tests | ||||||
| ===== | ===== | ||||||
| To run the test suite, ensure you are running a local instance of MongoDB on | To run the test suite, ensure you are running a local instance of MongoDB on | ||||||
| the standard port, and run ``python setup.py test``. | the standard port, and run: ``python setup.py test``. | ||||||
|  |  | ||||||
| Community | Community | ||||||
| ========= | ========= | ||||||
| @@ -92,11 +88,8 @@ Community | |||||||
|   <http://groups.google.com/group/mongoengine-users>`_ |   <http://groups.google.com/group/mongoengine-users>`_ | ||||||
| - `MongoEngine Developers mailing list | - `MongoEngine Developers mailing list | ||||||
|   <http://groups.google.com/group/mongoengine-dev>`_ |   <http://groups.google.com/group/mongoengine-dev>`_ | ||||||
| - `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_ | - `#mongoengine IRC channel <http://webchat.freenode.net/?channels=mongoengine>`_ | ||||||
|  |  | ||||||
| Contributing | Contributing | ||||||
| ============ | ============ | ||||||
| The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to | We welcome contributions! See the `Contribution guidelines <https://github.com/MongoEngine/mongoengine/blob/master/CONTRIBUTING.rst>`_ | ||||||
| contribute to the project, fork it on GitHub and send a pull request, all |  | ||||||
| contributions and suggestions are welcome! |  | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										49
									
								
								benchmark.py
									
									
									
									
									
								
							
							
						
						
									
										49
									
								
								benchmark.py
									
									
									
									
									
								
							| @@ -28,47 +28,64 @@ def main(): | |||||||
|  |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |     ---------------------------------------------------------------------------------------------------- | ||||||
|     Creating 10000 dictionaries - Pymongo |     Creating 10000 dictionaries - Pymongo | ||||||
|     1.1141769886 |     3.86744189262 | ||||||
|     ---------------------------------------------------------------------------------------------------- |     ---------------------------------------------------------------------------------------------------- | ||||||
|     Creating 10000 dictionaries - MongoEngine |     Creating 10000 dictionaries - MongoEngine | ||||||
|     2.37724113464 |     6.23374891281 | ||||||
|     ---------------------------------------------------------------------------------------------------- |     ---------------------------------------------------------------------------------------------------- | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||||
|     1.92479610443 |     5.33027005196 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||||
|  |     pass - No Cascade | ||||||
|  |  | ||||||
|     0.5.X |     0.5.X | ||||||
|     ---------------------------------------------------------------------------------------------------- |     ---------------------------------------------------------------------------------------------------- | ||||||
|     Creating 10000 dictionaries - Pymongo |     Creating 10000 dictionaries - Pymongo | ||||||
|     1.10552310944 |     3.89597702026 | ||||||
|     ---------------------------------------------------------------------------------------------------- |     ---------------------------------------------------------------------------------------------------- | ||||||
|     Creating 10000 dictionaries - MongoEngine |     Creating 10000 dictionaries - MongoEngine | ||||||
|     16.5169169903 |     21.7735359669 | ||||||
|     ---------------------------------------------------------------------------------------------------- |     ---------------------------------------------------------------------------------------------------- | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||||
|     14.9446101189 |     19.8670389652 | ||||||
|     ---------------------------------------------------------------------------------------------------- |     ---------------------------------------------------------------------------------------------------- | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||||
|     14.912801981 |     pass - No Cascade | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, force=True |  | ||||||
|     14.9617750645 |  | ||||||
|  |  | ||||||
|     Performance |     0.6.X | ||||||
|     ---------------------------------------------------------------------------------------------------- |     ---------------------------------------------------------------------------------------------------- | ||||||
|     Creating 10000 dictionaries - Pymongo |     Creating 10000 dictionaries - Pymongo | ||||||
|     1.10072994232 |     3.81559205055 | ||||||
|     ---------------------------------------------------------------------------------------------------- |     ---------------------------------------------------------------------------------------------------- | ||||||
|     Creating 10000 dictionaries - MongoEngine |     Creating 10000 dictionaries - MongoEngine | ||||||
|     5.27341103554 |     10.0446798801 | ||||||
|     ---------------------------------------------------------------------------------------------------- |     ---------------------------------------------------------------------------------------------------- | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||||
|     4.49365401268 |     9.51354718208 | ||||||
|     ---------------------------------------------------------------------------------------------------- |     ---------------------------------------------------------------------------------------------------- | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||||
|     4.43459296227 |     9.02567505836 | ||||||
|     ---------------------------------------------------------------------------------------------------- |     ---------------------------------------------------------------------------------------------------- | ||||||
|     Creating 10000 dictionaries - MongoEngine, force=True |     Creating 10000 dictionaries - MongoEngine, force=True | ||||||
|     4.40114378929 |     8.44933390617 | ||||||
|  |  | ||||||
|  |     0.7.X | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - Pymongo | ||||||
|  |     3.78801012039 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine | ||||||
|  |     9.73050498962 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||||
|  |     8.33456707001 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||||
|  |     8.37778115273 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, force=True | ||||||
|  |     8.36906409264 | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     setup = """ |     setup = """ | ||||||
|   | |||||||
| @@ -47,25 +47,28 @@ Querying | |||||||
| Fields | Fields | ||||||
| ====== | ====== | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.StringField | .. autoclass:: mongoengine.BinaryField | ||||||
| .. autoclass:: mongoengine.URLField | .. autoclass:: mongoengine.BooleanField | ||||||
| .. autoclass:: mongoengine.EmailField |  | ||||||
| .. autoclass:: mongoengine.IntField |  | ||||||
| .. autoclass:: mongoengine.FloatField |  | ||||||
| .. autoclass:: mongoengine.DecimalField |  | ||||||
| .. autoclass:: mongoengine.DateTimeField |  | ||||||
| .. autoclass:: mongoengine.ComplexDateTimeField | .. autoclass:: mongoengine.ComplexDateTimeField | ||||||
| .. autoclass:: mongoengine.ListField | .. autoclass:: mongoengine.DateTimeField | ||||||
| .. autoclass:: mongoengine.SortedListField | .. autoclass:: mongoengine.DecimalField | ||||||
| .. autoclass:: mongoengine.DictField | .. autoclass:: mongoengine.DictField | ||||||
|  | .. autoclass:: mongoengine.DynamicField | ||||||
|  | .. autoclass:: mongoengine.EmailField | ||||||
|  | .. autoclass:: mongoengine.EmbeddedDocumentField | ||||||
|  | .. autoclass:: mongoengine.FileField | ||||||
|  | .. autoclass:: mongoengine.FloatField | ||||||
|  | .. autoclass:: mongoengine.GenericEmbeddedDocumentField | ||||||
|  | .. autoclass:: mongoengine.GenericReferenceField | ||||||
|  | .. autoclass:: mongoengine.GeoPointField | ||||||
|  | .. autoclass:: mongoengine.ImageField | ||||||
|  | .. autoclass:: mongoengine.IntField | ||||||
|  | .. autoclass:: mongoengine.ListField | ||||||
| .. autoclass:: mongoengine.MapField | .. autoclass:: mongoengine.MapField | ||||||
| .. autoclass:: mongoengine.ObjectIdField | .. autoclass:: mongoengine.ObjectIdField | ||||||
| .. autoclass:: mongoengine.ReferenceField | .. autoclass:: mongoengine.ReferenceField | ||||||
| .. autoclass:: mongoengine.GenericReferenceField |  | ||||||
| .. autoclass:: mongoengine.EmbeddedDocumentField |  | ||||||
| .. autoclass:: mongoengine.GenericEmbeddedDocumentField |  | ||||||
| .. autoclass:: mongoengine.BooleanField |  | ||||||
| .. autoclass:: mongoengine.FileField |  | ||||||
| .. autoclass:: mongoengine.BinaryField |  | ||||||
| .. autoclass:: mongoengine.GeoPointField |  | ||||||
| .. autoclass:: mongoengine.SequenceField | .. autoclass:: mongoengine.SequenceField | ||||||
|  | .. autoclass:: mongoengine.SortedListField | ||||||
|  | .. autoclass:: mongoengine.StringField | ||||||
|  | .. autoclass:: mongoengine.URLField | ||||||
|  | .. autoclass:: mongoengine.UUIDField | ||||||
|   | |||||||
| @@ -2,8 +2,107 @@ | |||||||
| Changelog | Changelog | ||||||
| ========= | ========= | ||||||
|  |  | ||||||
| Changes in 0.6.13 | Changes in 0.7.X | ||||||
| ================ | ================ | ||||||
|  | - Unicode fix for repr (MongoEngine/mongoengine#133) | ||||||
|  | - Allow updates with match operators (MongoEngine/mongoengine#144) | ||||||
|  | - Updated URLField - the validation regex can now be overridden (MongoEngine/mongoengine#136) | ||||||
|  | - Allow Django AuthenticationBackends to work with Django user (hmarr/mongoengine#573) | ||||||
|  | - Fixed reload issue with ReferenceField where dbref=False (MongoEngine/mongoengine#138) | ||||||
|  |  | ||||||
|  | Changes in 0.7.5 | ||||||
|  | ================ | ||||||
|  | - ReferenceFields with dbref=False use ObjectId instead of strings (MongoEngine/mongoengine#134) | ||||||
|  |   See ticket for upgrade notes (https://github.com/MongoEngine/mongoengine/issues/134) | ||||||
|  |  | ||||||
|  | Changes in 0.7.4 | ||||||
|  | ================ | ||||||
|  | - Fixed index inheritance issues - firmed up testcases (MongoEngine/mongoengine#123) (MongoEngine/mongoengine#125) | ||||||
|  |  | ||||||
|  | Changes in 0.7.3 | ||||||
|  | ================ | ||||||
|  | - Reverted EmbeddedDocuments meta handling - now can turn off inheritance (MongoEngine/mongoengine#119) | ||||||
|  |  | ||||||
|  | Changes in 0.7.2 | ||||||
|  | ================ | ||||||
|  | - Update index spec generation so its not destructive (MongoEngine/mongoengine#113) | ||||||
|  |  | ||||||
|  | Changes in 0.7.1 | ||||||
|  | ================= | ||||||
|  | - Fixed index spec inheritance (MongoEngine/mongoengine#111) | ||||||
|  |  | ||||||
|  | Changes in 0.7.0 | ||||||
|  | ================= | ||||||
|  | - Updated queryset.delete so you can use with skip / limit (MongoEngine/mongoengine#107) | ||||||
|  | - Updated index creation allows kwargs to be passed through refs (MongoEngine/mongoengine#104) | ||||||
|  | - Fixed Q object merge edge case (MongoEngine/mongoengine#109) | ||||||
|  | - Fixed reloading on sharded documents (hmarr/mongoengine#569) | ||||||
|  | - Added NotUniqueError for duplicate keys (MongoEngine/mongoengine#62) | ||||||
|  | - Added custom collection / sequence naming for SequenceFields (MongoEngine/mongoengine#92) | ||||||
|  | - Fixed UnboundLocalError in composite index with pk field (MongoEngine/mongoengine#88) | ||||||
|  | - Updated ReferenceField's to optionally store ObjectId strings | ||||||
|  |   this will become the default in 0.8 (MongoEngine/mongoengine#89) | ||||||
|  | - Added FutureWarning - save will default to `cascade=False` in 0.8 | ||||||
|  | - Added example of indexing embedded document fields (MongoEngine/mongoengine#75) | ||||||
|  | - Fixed ImageField resizing when forcing size (MongoEngine/mongoengine#80) | ||||||
|  | - Add flexibility for fields handling bad data (MongoEngine/mongoengine#78) | ||||||
|  | - Embedded Documents no longer handle meta definitions | ||||||
|  | - Use weakref proxies in base lists / dicts (MongoEngine/mongoengine#74) | ||||||
|  | - Improved queryset filtering (hmarr/mongoengine#554) | ||||||
|  | - Fixed Dynamic Documents and Embedded Documents (hmarr/mongoengine#561) | ||||||
|  | - Fixed abstract classes and shard keys (MongoEngine/mongoengine#64) | ||||||
|  | - Fixed Python 2.5 support | ||||||
|  | - Added Python 3 support (thanks to Laine Heron) | ||||||
|  |  | ||||||
|  | Changes in 0.6.20 | ||||||
|  | ================= | ||||||
|  | - Added support for distinct and db_alias (MongoEngine/mongoengine#59) | ||||||
|  | - Improved support for chained querysets when constraining the same fields (hmarr/mongoengine#554) | ||||||
|  | - Fixed BinaryField lookup re (MongoEngine/mongoengine#48) | ||||||
|  |  | ||||||
|  | Changes in 0.6.19 | ||||||
|  | ================= | ||||||
|  |  | ||||||
|  | - Added Binary support to UUID (MongoEngine/mongoengine#47) | ||||||
|  | - Fixed MapField lookup for fields without declared lookups (MongoEngine/mongoengine#46) | ||||||
|  | - Fixed BinaryField python value issue (MongoEngine/mongoengine#48) | ||||||
|  | - Fixed SequenceField non numeric value lookup (MongoEngine/mongoengine#41) | ||||||
|  | - Fixed queryset manager issue (MongoEngine/mongoengine#52) | ||||||
|  | - Fixed FileField comparison (hmarr/mongoengine#547) | ||||||
|  |  | ||||||
|  | Changes in 0.6.18 | ||||||
|  | ================= | ||||||
|  | - Fixed recursion loading bug in _get_changed_fields | ||||||
|  |  | ||||||
|  | Changes in 0.6.17 | ||||||
|  | ================= | ||||||
|  | - Fixed issue with custom queryset manager expecting explicit variable names | ||||||
|  |  | ||||||
|  | Changes in 0.6.16 | ||||||
|  | ================= | ||||||
|  | - Fixed issue where db_alias wasn't inherited | ||||||
|  |  | ||||||
|  | Changes in 0.6.15 | ||||||
|  | ================= | ||||||
|  | - Updated validation error messages | ||||||
|  | - Added support for null / zero / false values in item_frequencies | ||||||
|  | - Fixed cascade save edge case | ||||||
|  | - Fixed geo index creation through reference fields | ||||||
|  | - Added support for args / kwargs when using @queryset_manager | ||||||
|  | - Deref list custom id fix | ||||||
|  |  | ||||||
|  | Changes in 0.6.14 | ||||||
|  | ================= | ||||||
|  | - Fixed error dict with nested validation | ||||||
|  | - Fixed Int/Float fields and not equals None | ||||||
|  | - Exclude tests from installation | ||||||
|  | - Allow tuples for index meta | ||||||
|  | - Fixed use of str in instance checks | ||||||
|  | - Fixed unicode support in transform update | ||||||
|  | - Added support for add_to_set and each | ||||||
|  |  | ||||||
|  | Changes in 0.6.13 | ||||||
|  | ================= | ||||||
| - Fixed EmbeddedDocument db_field validation issue | - Fixed EmbeddedDocument db_field validation issue | ||||||
| - Fixed StringField unicode issue | - Fixed StringField unicode issue | ||||||
| - Fixes __repr__ modifying the cursor | - Fixes __repr__ modifying the cursor | ||||||
| @@ -39,7 +138,7 @@ Changes in 0.6.8 | |||||||
| ================ | ================ | ||||||
| - Fixed FileField losing reference when no default set | - Fixed FileField losing reference when no default set | ||||||
| - Removed possible race condition from FileField (grid_file) | - Removed possible race condition from FileField (grid_file) | ||||||
| - Added assignment to save, can now do: b = MyDoc(**kwargs).save() | - Added assignment to save, can now do: `b = MyDoc(**kwargs).save()` | ||||||
| - Added support for pull operations on nested EmbeddedDocuments | - Added support for pull operations on nested EmbeddedDocuments | ||||||
| - Added support for choices with GenericReferenceFields | - Added support for choices with GenericReferenceFields | ||||||
| - Added support for choices with GenericEmbeddedDocumentFields | - Added support for choices with GenericEmbeddedDocumentFields | ||||||
|   | |||||||
| @@ -62,28 +62,31 @@ not provided. Default values may optionally be a callable, which will be called | |||||||
| to retrieve the value (such as in the above example). The field types available | to retrieve the value (such as in the above example). The field types available | ||||||
| are as follows: | are as follows: | ||||||
|  |  | ||||||
| * :class:`~mongoengine.StringField` | * :class:`~mongoengine.BinaryField` | ||||||
| * :class:`~mongoengine.URLField` | * :class:`~mongoengine.BooleanField` | ||||||
| * :class:`~mongoengine.EmailField` |  | ||||||
| * :class:`~mongoengine.IntField` |  | ||||||
| * :class:`~mongoengine.FloatField` |  | ||||||
| * :class:`~mongoengine.DecimalField` |  | ||||||
| * :class:`~mongoengine.DateTimeField` |  | ||||||
| * :class:`~mongoengine.ComplexDateTimeField` | * :class:`~mongoengine.ComplexDateTimeField` | ||||||
| * :class:`~mongoengine.ListField` | * :class:`~mongoengine.DateTimeField` | ||||||
| * :class:`~mongoengine.SortedListField` | * :class:`~mongoengine.DecimalField` | ||||||
| * :class:`~mongoengine.DictField` | * :class:`~mongoengine.DictField` | ||||||
|  | * :class:`~mongoengine.DynamicField` | ||||||
|  | * :class:`~mongoengine.EmailField` | ||||||
|  | * :class:`~mongoengine.EmbeddedDocumentField` | ||||||
|  | * :class:`~mongoengine.FileField` | ||||||
|  | * :class:`~mongoengine.FloatField` | ||||||
|  | * :class:`~mongoengine.GenericEmbeddedDocumentField` | ||||||
|  | * :class:`~mongoengine.GenericReferenceField` | ||||||
|  | * :class:`~mongoengine.GeoPointField` | ||||||
|  | * :class:`~mongoengine.ImageField` | ||||||
|  | * :class:`~mongoengine.IntField` | ||||||
|  | * :class:`~mongoengine.ListField` | ||||||
| * :class:`~mongoengine.MapField` | * :class:`~mongoengine.MapField` | ||||||
| * :class:`~mongoengine.ObjectIdField` | * :class:`~mongoengine.ObjectIdField` | ||||||
| * :class:`~mongoengine.ReferenceField` | * :class:`~mongoengine.ReferenceField` | ||||||
| * :class:`~mongoengine.GenericReferenceField` |  | ||||||
| * :class:`~mongoengine.EmbeddedDocumentField` |  | ||||||
| * :class:`~mongoengine.GenericEmbeddedDocumentField` |  | ||||||
| * :class:`~mongoengine.BooleanField` |  | ||||||
| * :class:`~mongoengine.FileField` |  | ||||||
| * :class:`~mongoengine.BinaryField` |  | ||||||
| * :class:`~mongoengine.GeoPointField` |  | ||||||
| * :class:`~mongoengine.SequenceField` | * :class:`~mongoengine.SequenceField` | ||||||
|  | * :class:`~mongoengine.SortedListField` | ||||||
|  | * :class:`~mongoengine.StringField` | ||||||
|  | * :class:`~mongoengine.URLField` | ||||||
|  | * :class:`~mongoengine.UUIDField` | ||||||
|  |  | ||||||
| Field arguments | Field arguments | ||||||
| --------------- | --------------- | ||||||
| @@ -256,6 +259,35 @@ as the constructor's argument:: | |||||||
|         content = StringField() |         content = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | .. _one-to-many-with-listfields: | ||||||
|  |  | ||||||
|  | One to Many with ListFields | ||||||
|  | ''''''''''''''''''''''''''' | ||||||
|  |  | ||||||
|  | If you are implementing a one to many relationship via a list of references, | ||||||
|  | then the references are stored as DBRefs and to query you need to pass an | ||||||
|  | instance of the object to the query:: | ||||||
|  |  | ||||||
|  |     class User(Document): | ||||||
|  |         name = StringField() | ||||||
|  |  | ||||||
|  |     class Page(Document): | ||||||
|  |         content = StringField() | ||||||
|  |         authors = ListField(ReferenceField(User)) | ||||||
|  |  | ||||||
|  |     bob = User(name="Bob Jones").save() | ||||||
|  |     john = User(name="John Smith").save() | ||||||
|  |  | ||||||
|  |     Page(content="Test Page", authors=[bob, john]).save() | ||||||
|  |     Page(content="Another Page", authors=[john]).save() | ||||||
|  |  | ||||||
|  |     # Find all pages Bob authored | ||||||
|  |     Page.objects(authors__in=[bob]) | ||||||
|  |  | ||||||
|  |     # Find all pages that both Bob and John have authored | ||||||
|  |     Page.objects(authors__all=[bob, john]) | ||||||
|  |  | ||||||
|  |  | ||||||
| Dealing with deletion of referred documents | Dealing with deletion of referred documents | ||||||
| ''''''''''''''''''''''''''''''''''''''''''' | ''''''''''''''''''''''''''''''''''''''''''' | ||||||
| By default, MongoDB doesn't check the integrity of your data, so deleting | By default, MongoDB doesn't check the integrity of your data, so deleting | ||||||
| @@ -312,6 +344,10 @@ Its value can take any of the following constants: | |||||||
|    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. |    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | .. warning:: | ||||||
|  |    Signals are not triggered when doing cascading updates / deletes - if this | ||||||
|  |    is required you must manually handle the update / delete. | ||||||
|  |  | ||||||
| Generic reference fields | Generic reference fields | ||||||
| '''''''''''''''''''''''' | '''''''''''''''''''''''' | ||||||
| A second kind of reference field also exists, | A second kind of reference field also exists, | ||||||
| @@ -433,13 +469,18 @@ If a dictionary is passed then the following options are available: | |||||||
|     Whether the index should be sparse. |     Whether the index should be sparse. | ||||||
|  |  | ||||||
| :attr:`unique` (Default: False) | :attr:`unique` (Default: False) | ||||||
|     Whether the index should be sparse. |     Whether the index should be unique. | ||||||
|  |  | ||||||
|  | .. note :: | ||||||
|  |  | ||||||
|  |     To index embedded files / dictionary fields use 'dot' notation eg: | ||||||
|  |     `rank.title` | ||||||
|  |  | ||||||
| .. warning:: | .. warning:: | ||||||
|  |  | ||||||
|  |     Inheritance adds extra indices. | ||||||
|    Inheritance adds extra indices. |     If you don't need inheritance for a document turn inheritance off - | ||||||
|    If you don't need inheritance for a document turn inheritance off - see :ref:`document-inheritance`. |     see :ref:`document-inheritance`. | ||||||
|  |  | ||||||
|  |  | ||||||
| Geospatial indexes | Geospatial indexes | ||||||
|   | |||||||
| @@ -232,7 +232,7 @@ custom manager methods as you like:: | |||||||
|     BlogPost(title='test1', published=False).save() |     BlogPost(title='test1', published=False).save() | ||||||
|     BlogPost(title='test2', published=True).save() |     BlogPost(title='test2', published=True).save() | ||||||
|     assert len(BlogPost.objects) == 2 |     assert len(BlogPost.objects) == 2 | ||||||
|     assert len(BlogPost.live_posts) == 1 |     assert len(BlogPost.live_posts()) == 1 | ||||||
|  |  | ||||||
| Custom QuerySets | Custom QuerySets | ||||||
| ================ | ================ | ||||||
| @@ -243,11 +243,16 @@ a document, set ``queryset_class`` to the custom class in a | |||||||
| :class:`~mongoengine.Document`\ s ``meta`` dictionary:: | :class:`~mongoengine.Document`\ s ``meta`` dictionary:: | ||||||
|  |  | ||||||
|     class AwesomerQuerySet(QuerySet): |     class AwesomerQuerySet(QuerySet): | ||||||
|         pass |  | ||||||
|  |         def get_awesome(self): | ||||||
|  |             return self.filter(awesome=True) | ||||||
|  |  | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         meta = {'queryset_class': AwesomerQuerySet} |         meta = {'queryset_class': AwesomerQuerySet} | ||||||
|  |  | ||||||
|  |     # To call: | ||||||
|  |     Page.objects.get_awesome() | ||||||
|  |  | ||||||
| .. versionadded:: 0.4 | .. versionadded:: 0.4 | ||||||
|  |  | ||||||
| Aggregation | Aggregation | ||||||
|   | |||||||
| @@ -50,4 +50,11 @@ Example usage:: | |||||||
|     signals.post_save.connect(Author.post_save, sender=Author) |     signals.post_save.connect(Author.post_save, sender=Author) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | ReferenceFields and signals | ||||||
|  | --------------------------- | ||||||
|  |  | ||||||
|  | Currently `reverse_delete_rules` do not trigger signals on the other part of | ||||||
|  | the relationship.  If this is required you must manually handle the | ||||||
|  | reverse deletion. | ||||||
|  |  | ||||||
| .. _blinker: http://pypi.python.org/pypi/blinker | .. _blinker: http://pypi.python.org/pypi/blinker | ||||||
|   | |||||||
| @@ -34,10 +34,10 @@ To get help with using MongoEngine, use the `MongoEngine Users mailing list | |||||||
| Contributing | Contributing | ||||||
| ------------ | ------------ | ||||||
|  |  | ||||||
| The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ and | The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ and | ||||||
| contributions are always encouraged. Contributions can be as simple as | contributions are always encouraged. Contributions can be as simple as | ||||||
| minor tweaks to this documentation. To contribute, fork the project on | minor tweaks to this documentation. To contribute, fork the project on | ||||||
| `GitHub <http://github.com/hmarr/mongoengine>`_ and send a | `GitHub <http://github.com/MongoEngine/mongoengine>`_ and send a | ||||||
| pull request. | pull request. | ||||||
|  |  | ||||||
| Also, you can join the developers' `mailing list | Also, you can join the developers' `mailing list | ||||||
|   | |||||||
| @@ -2,18 +2,86 @@ | |||||||
| Upgrading | Upgrading | ||||||
| ========= | ========= | ||||||
|  |  | ||||||
|  | 0.6 to 0.7 | ||||||
|  | ========== | ||||||
|  |  | ||||||
|  | Cascade saves | ||||||
|  | ------------- | ||||||
|  |  | ||||||
|  | Saves will raise a `FutureWarning` if they cascade and cascade hasn't been set | ||||||
|  | to True.  This is because in 0.8 it will default to False.  If you require | ||||||
|  | cascading saves then either set it in the `meta` or pass | ||||||
|  | via `save` eg :: | ||||||
|  |  | ||||||
|  |     # At the class level: | ||||||
|  |     class Person(Document): | ||||||
|  |         meta = {'cascade': True} | ||||||
|  |  | ||||||
|  |     # Or in code: | ||||||
|  |     my_document.save(cascade=True) | ||||||
|  |  | ||||||
|  | .. note :: | ||||||
|  |     Remember: cascading saves **do not** cascade through lists. | ||||||
|  |  | ||||||
|  | ReferenceFields | ||||||
|  | --------------- | ||||||
|  |  | ||||||
|  | ReferenceFields now can store references as ObjectId strings instead of DBRefs. | ||||||
|  | This will become the default in 0.8 and if `dbref` is not set a `FutureWarning` | ||||||
|  | will be raised. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | To explicitly continue to use DBRefs change the `dbref` flag | ||||||
|  | to True :: | ||||||
|  |  | ||||||
|  |    class Person(Document): | ||||||
|  |        groups = ListField(ReferenceField(Group, dbref=True)) | ||||||
|  |  | ||||||
|  | To migrate to using strings instead of DBRefs you will have to manually | ||||||
|  | migrate :: | ||||||
|  |  | ||||||
|  |         # Step 1 - Migrate the model definition | ||||||
|  |         class Group(Document): | ||||||
|  |             author = ReferenceField(User, dbref=False) | ||||||
|  |             members = ListField(ReferenceField(User, dbref=False)) | ||||||
|  |  | ||||||
|  |         # Step 2 - Migrate the data | ||||||
|  |         for g in Group.objects(): | ||||||
|  |             g.author = g.author | ||||||
|  |             g.members = g.members | ||||||
|  |             g.save() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | item_frequencies | ||||||
|  | ---------------- | ||||||
|  |  | ||||||
|  | In the 0.6 series we added support for null / zero / false values in | ||||||
|  | item_frequencies.  A side effect was to return keys in the value they are | ||||||
|  | stored in rather than as string representations.  Your code may need to be | ||||||
|  | updated to handle native types rather than string keys for the results of | ||||||
|  | item frequency queries. | ||||||
|  |  | ||||||
|  | BinaryFields | ||||||
|  | ------------ | ||||||
|  |  | ||||||
|  | Binary fields have been updated so that they are native binary types.  If you | ||||||
|  | previously were doing `str` comparisons with binary field values you will have | ||||||
|  | to update and wrap the value in a `str`. | ||||||
|  |  | ||||||
| 0.5 to 0.6 | 0.5 to 0.6 | ||||||
| ========== | ========== | ||||||
|  |  | ||||||
| Embedded Documents - if you had a `pk` field you will have to rename it from `_id` | Embedded Documents - if you had a `pk` field you will have to rename it from | ||||||
| to `pk` as pk is no longer a property of Embedded Documents. | `_id` to `pk` as pk is no longer a property of Embedded Documents. | ||||||
|  |  | ||||||
| Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw | Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw | ||||||
| an InvalidDocument error as they aren't currently supported. | an InvalidDocument error as they aren't currently supported. | ||||||
|  |  | ||||||
| Document._get_subclasses - Is no longer used and the class method has been removed. | Document._get_subclasses - Is no longer used and the class method has been | ||||||
|  | removed. | ||||||
|  |  | ||||||
| Document.objects.with_id - now raises an InvalidQueryError if used with a filter. | Document.objects.with_id - now raises an InvalidQueryError if used with a | ||||||
|  | filter. | ||||||
|  |  | ||||||
| FutureWarning - A future warning has been added to all inherited classes that | FutureWarning - A future warning has been added to all inherited classes that | ||||||
| don't define `allow_inheritance` in their meta. | don't define `allow_inheritance` in their meta. | ||||||
| @@ -37,11 +105,11 @@ human-readable name for the option. | |||||||
| PyMongo / MongoDB | PyMongo / MongoDB | ||||||
| ----------------- | ----------------- | ||||||
|  |  | ||||||
| map reduce now requires pymongo 1.11+- The pymongo merge_output and reduce_output | map reduce now requires pymongo 1.11+- The pymongo `merge_output` and | ||||||
| parameters, have been deprecated. | `reduce_output` parameters, have been deprecated. | ||||||
|  |  | ||||||
| More methods now use map_reduce as db.eval is not supported for sharding as such | More methods now use map_reduce as db.eval is not supported for sharding as | ||||||
| the following have been changed: | such the following have been changed: | ||||||
|  |  | ||||||
|     * :meth:`~mongoengine.queryset.QuerySet.sum` |     * :meth:`~mongoengine.queryset.QuerySet.sum` | ||||||
|     * :meth:`~mongoengine.queryset.QuerySet.average` |     * :meth:`~mongoengine.queryset.QuerySet.average` | ||||||
| @@ -51,8 +119,8 @@ the following have been changed: | |||||||
| Default collection naming | Default collection naming | ||||||
| ------------------------- | ------------------------- | ||||||
|  |  | ||||||
| Previously it was just lowercase, it's now much more pythonic and readable as it's | Previously it was just lowercase, it's now much more pythonic and readable as | ||||||
| lowercase and underscores, previously :: | it's lowercase and underscores, previously :: | ||||||
|  |  | ||||||
|     class MyAceDocument(Document): |     class MyAceDocument(Document): | ||||||
|         pass |         pass | ||||||
| @@ -88,7 +156,8 @@ Alternatively, you can rename your collections eg :: | |||||||
|  |  | ||||||
|         failure = False |         failure = False | ||||||
|  |  | ||||||
|         collection_names = [d._get_collection_name() for d in _document_registry.values()] |         collection_names = [d._get_collection_name() | ||||||
|  |                             for d in _document_registry.values()] | ||||||
|  |  | ||||||
|         for new_style_name in collection_names: |         for new_style_name in collection_names: | ||||||
|             if not new_style_name:  # embedded documents don't have collections |             if not new_style_name:  # embedded documents don't have collections | ||||||
| @@ -106,7 +175,8 @@ Alternatively, you can rename your collections eg :: | |||||||
|                         old_style_name, new_style_name) |                         old_style_name, new_style_name) | ||||||
|                 else: |                 else: | ||||||
|                     db[old_style_name].rename(new_style_name) |                     db[old_style_name].rename(new_style_name) | ||||||
|                     print "Renamed:  %s to %s" % (old_style_name, new_style_name) |                     print "Renamed:  %s to %s" % (old_style_name, | ||||||
|  |                                                   new_style_name) | ||||||
|  |  | ||||||
|         if failure: |         if failure: | ||||||
|             print "Upgrading  collection names failed" |             print "Upgrading  collection names failed" | ||||||
|   | |||||||
| @@ -12,13 +12,12 @@ from signals import * | |||||||
| __all__ = (document.__all__ + fields.__all__ + connection.__all__ + | __all__ = (document.__all__ + fields.__all__ + connection.__all__ + | ||||||
|            queryset.__all__ + signals.__all__) |            queryset.__all__ + signals.__all__) | ||||||
|  |  | ||||||
| VERSION = (0, 6, 13) | VERSION = (0, 7, 5) | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_version(): | def get_version(): | ||||||
|     version = '%s.%s' % (VERSION[0], VERSION[1]) |     if isinstance(VERSION[-1], basestring): | ||||||
|     if VERSION[2]: |         return '.'.join(map(str, VERSION[:-1])) + VERSION[-1] | ||||||
|         version = '%s.%s' % (version, VERSION[2]) |     return '.'.join(map(str, VERSION)) | ||||||
|     return version |  | ||||||
|  |  | ||||||
| __version__ = get_version() | __version__ = get_version() | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -31,15 +31,34 @@ class DeReference(object): | |||||||
|             items = [i for i in items] |             items = [i for i in items] | ||||||
|  |  | ||||||
|         self.max_depth = max_depth |         self.max_depth = max_depth | ||||||
|  |  | ||||||
|         doc_type = None |         doc_type = None | ||||||
|  |  | ||||||
|         if instance and instance._fields: |         if instance and instance._fields: | ||||||
|             doc_type = instance._fields[name].field |             doc_type = instance._fields.get(name) | ||||||
|  |             if hasattr(doc_type, 'field'): | ||||||
|  |                 doc_type = doc_type.field | ||||||
|  |  | ||||||
|             if isinstance(doc_type, ReferenceField): |             if isinstance(doc_type, ReferenceField): | ||||||
|  |                 field = doc_type | ||||||
|                 doc_type = doc_type.document_type |                 doc_type = doc_type.document_type | ||||||
|                 if all([i.__class__ == doc_type for i in items]): |                 is_list = not hasattr(items, 'items') | ||||||
|  |  | ||||||
|  |                 if is_list and all([i.__class__ == doc_type for i in items]): | ||||||
|                     return items |                     return items | ||||||
|  |                 elif not is_list and all([i.__class__ == doc_type | ||||||
|  |                                          for i in items.values()]): | ||||||
|  |                     return items | ||||||
|  |                 elif not field.dbref: | ||||||
|  |                     if not hasattr(items, 'items'): | ||||||
|  |                         items = [field.to_python(v) | ||||||
|  |                              if not isinstance(v, (DBRef, Document)) else v | ||||||
|  |                              for v in items] | ||||||
|  |                     else: | ||||||
|  |                         items = dict([ | ||||||
|  |                             (k, field.to_python(v)) | ||||||
|  |                             if not isinstance(v, (DBRef, Document)) else (k, v) | ||||||
|  |                             for k, v in items.iteritems()] | ||||||
|  |                         ) | ||||||
|  |  | ||||||
|         self.reference_map = self._find_references(items) |         self.reference_map = self._find_references(items) | ||||||
|         self.object_map = self._fetch_objects(doc_type=doc_type) |         self.object_map = self._fetch_objects(doc_type=doc_type) | ||||||
| @@ -115,7 +134,7 @@ class DeReference(object): | |||||||
|                         elif doc_type is None: |                         elif doc_type is None: | ||||||
|                             doc = get_document( |                             doc = get_document( | ||||||
|                                 ''.join(x.capitalize() |                                 ''.join(x.capitalize() | ||||||
|                                         for x in col.split('_')))._from_son(ref) |                                     for x in col.split('_')))._from_son(ref) | ||||||
|                         else: |                         else: | ||||||
|                             doc = doc_type._from_son(ref) |                             doc = doc_type._from_son(ref) | ||||||
|                         object_map[doc.id] = doc |                         object_map[doc.id] = doc | ||||||
| @@ -147,7 +166,7 @@ class DeReference(object): | |||||||
|                 return self.object_map.get(items['_ref'].id, items) |                 return self.object_map.get(items['_ref'].id, items) | ||||||
|             elif '_types' in items and '_cls' in items: |             elif '_types' in items and '_cls' in items: | ||||||
|                 doc = get_document(items['_cls'])._from_son(items) |                 doc = get_document(items['_cls'])._from_son(items) | ||||||
|                 doc._data = self._attach_objects(doc._data, depth, doc, name) |                 doc._data = self._attach_objects(doc._data, depth, doc, None) | ||||||
|                 return doc |                 return doc | ||||||
|  |  | ||||||
|         if not hasattr(items, 'items'): |         if not hasattr(items, 'items'): | ||||||
| @@ -166,7 +185,7 @@ class DeReference(object): | |||||||
|             else: |             else: | ||||||
|                 data[k] = v |                 data[k] = v | ||||||
|  |  | ||||||
|             if k in self.object_map: |             if k in self.object_map and not is_list: | ||||||
|                 data[k] = self.object_map[k] |                 data[k] = self.object_map[k] | ||||||
|             elif hasattr(v, '_fields'): |             elif hasattr(v, '_fields'): | ||||||
|                 for field_name, field in v._fields.iteritems(): |                 for field_name, field in v._fields.iteritems(): | ||||||
|   | |||||||
| @@ -3,6 +3,8 @@ import datetime | |||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
| from django.utils.encoding import smart_str | from django.utils.encoding import smart_str | ||||||
|  | from django.contrib.auth.models import _user_get_all_permissions | ||||||
|  | from django.contrib.auth.models import _user_has_perm | ||||||
| from django.contrib.auth.models import AnonymousUser | from django.contrib.auth.models import AnonymousUser | ||||||
| from django.utils.translation import ugettext_lazy as _ | from django.utils.translation import ugettext_lazy as _ | ||||||
|  |  | ||||||
| @@ -104,6 +106,25 @@ class User(Document): | |||||||
|         """ |         """ | ||||||
|         return check_password(raw_password, self.password) |         return check_password(raw_password, self.password) | ||||||
|  |  | ||||||
|  |     def get_all_permissions(self, obj=None): | ||||||
|  |         return _user_get_all_permissions(self, obj) | ||||||
|  |  | ||||||
|  |     def has_perm(self, perm, obj=None): | ||||||
|  |         """ | ||||||
|  |         Returns True if the user has the specified permission. This method | ||||||
|  |         queries all available auth backends, but returns immediately if any | ||||||
|  |         backend returns True. Thus, a user who has permission from a single | ||||||
|  |         auth backend is assumed to have permission in general. If an object is | ||||||
|  |         provided, permissions for this specific object are checked. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         # Active superusers have all permissions. | ||||||
|  |         if self.is_active and self.is_superuser: | ||||||
|  |             return True | ||||||
|  |  | ||||||
|  |         # Otherwise we need to check the backends. | ||||||
|  |         return _user_has_perm(self, perm, obj) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def create_user(cls, username, password, email=None): |     def create_user(cls, username, password, email=None): | ||||||
|         """Create (and save) a new user with the given username, password and |         """Create (and save) a new user with the given username, password and | ||||||
|   | |||||||
| @@ -1,4 +1,3 @@ | |||||||
| from django.http import Http404 |  | ||||||
| from mongoengine.queryset import QuerySet | from mongoengine.queryset import QuerySet | ||||||
| from mongoengine.base import BaseDocument | from mongoengine.base import BaseDocument | ||||||
| from mongoengine.base import ValidationError | from mongoengine.base import ValidationError | ||||||
| @@ -27,6 +26,7 @@ def get_document_or_404(cls, *args, **kwargs): | |||||||
|     try: |     try: | ||||||
|         return queryset.get(*args, **kwargs) |         return queryset.get(*args, **kwargs) | ||||||
|     except (queryset._document.DoesNotExist, ValidationError): |     except (queryset._document.DoesNotExist, ValidationError): | ||||||
|  |         from django.http import Http404 | ||||||
|         raise Http404('No %s matches the given query.' % queryset._document._class_name) |         raise Http404('No %s matches the given query.' % queryset._document._class_name) | ||||||
|  |  | ||||||
| def get_list_or_404(cls, *args, **kwargs): | def get_list_or_404(cls, *args, **kwargs): | ||||||
| @@ -42,5 +42,6 @@ def get_list_or_404(cls, *args, **kwargs): | |||||||
|     queryset = _get_queryset(cls) |     queryset = _get_queryset(cls) | ||||||
|     obj_list = list(queryset.filter(*args, **kwargs)) |     obj_list = list(queryset.filter(*args, **kwargs)) | ||||||
|     if not obj_list: |     if not obj_list: | ||||||
|  |         from django.http import Http404 | ||||||
|         raise Http404('No %s matches the given query.' % queryset._document._class_name) |         raise Http404('No %s matches the given query.' % queryset._document._class_name) | ||||||
|     return obj_list |     return obj_list | ||||||
|   | |||||||
| @@ -1,10 +1,28 @@ | |||||||
| #coding: utf-8 | #coding: utf-8 | ||||||
| from django.test import TestCase | from nose.plugins.skip import SkipTest | ||||||
| from django.conf import settings |  | ||||||
|  |  | ||||||
|  | from mongoengine.python_support import PY3 | ||||||
| from mongoengine import connect | from mongoengine import connect | ||||||
|  |  | ||||||
|  | try: | ||||||
|  |     from django.test import TestCase | ||||||
|  |     from django.conf import settings | ||||||
|  | except Exception as err: | ||||||
|  |     if PY3: | ||||||
|  |         from unittest import TestCase | ||||||
|  |         # Dummy value so no error | ||||||
|  |         class settings: | ||||||
|  |             MONGO_DATABASE_NAME = 'dummy' | ||||||
|  |     else: | ||||||
|  |         raise err | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoTestCase(TestCase): | class MongoTestCase(TestCase): | ||||||
|  |  | ||||||
|  |     def setUp(self): | ||||||
|  |         if PY3: | ||||||
|  |             raise SkipTest('django does not have Python 3 support') | ||||||
|  |  | ||||||
|     """ |     """ | ||||||
|     TestCase class that clear the collection between the tests |     TestCase class that clear the collection between the tests | ||||||
|     """ |     """ | ||||||
|   | |||||||
| @@ -1,15 +1,19 @@ | |||||||
|  | import warnings | ||||||
|  |  | ||||||
| import pymongo | import pymongo | ||||||
|  | import re | ||||||
|  |  | ||||||
| from bson.dbref import DBRef | from bson.dbref import DBRef | ||||||
|  | from mongoengine import signals, queryset | ||||||
|  |  | ||||||
| from mongoengine import signals |  | ||||||
| from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, | from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, | ||||||
|                   BaseDict, BaseList) |                   BaseDict, BaseList) | ||||||
| from queryset import OperationError | from queryset import OperationError, NotUniqueError | ||||||
| from connection import get_db, DEFAULT_CONNECTION_NAME | from connection import get_db, DEFAULT_CONNECTION_NAME | ||||||
|  |  | ||||||
| __all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument', | __all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument', | ||||||
|            'DynamicEmbeddedDocument', 'OperationError', 'InvalidCollectionError'] |            'DynamicEmbeddedDocument', 'OperationError', | ||||||
|  |            'InvalidCollectionError', 'NotUniqueError'] | ||||||
|  |  | ||||||
|  |  | ||||||
| class InvalidCollectionError(Exception): | class InvalidCollectionError(Exception): | ||||||
| @@ -21,8 +25,19 @@ class EmbeddedDocument(BaseDocument): | |||||||
|     collection.  :class:`~mongoengine.EmbeddedDocument`\ s should be used as |     collection.  :class:`~mongoengine.EmbeddedDocument`\ s should be used as | ||||||
|     fields on :class:`~mongoengine.Document`\ s through the |     fields on :class:`~mongoengine.Document`\ s through the | ||||||
|     :class:`~mongoengine.EmbeddedDocumentField` field type. |     :class:`~mongoengine.EmbeddedDocumentField` field type. | ||||||
|  |  | ||||||
|  |     A :class:`~mongoengine.EmbeddedDocument` subclass may be itself subclassed, | ||||||
|  |     to create a specialised version of the embedded document that will be | ||||||
|  |     stored in the same collection. To facilitate this behaviour, `_cls` and | ||||||
|  |     `_types` fields are added to documents (hidden through the MongoEngine | ||||||
|  |     interface though). To disable this behaviour and remove the dependence on | ||||||
|  |     the presence of `_cls` and `_types`, set :attr:`allow_inheritance` to | ||||||
|  |     ``False`` in the :attr:`meta` dictionary. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|  |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|  |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|  |     my_metaclass  = DocumentMetaclass | ||||||
|     __metaclass__ = DocumentMetaclass |     __metaclass__ = DocumentMetaclass | ||||||
|  |  | ||||||
|     def __init__(self, *args, **kwargs): |     def __init__(self, *args, **kwargs): | ||||||
| @@ -91,9 +106,12 @@ class Document(BaseDocument): | |||||||
|     disabled by either setting types to False on the specific index or |     disabled by either setting types to False on the specific index or | ||||||
|     by setting index_types to False on the meta dictionary for the document. |     by setting index_types to False on the meta dictionary for the document. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|  |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|  |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|  |     my_metaclass  = TopLevelDocumentMetaclass | ||||||
|     __metaclass__ = TopLevelDocumentMetaclass |     __metaclass__ = TopLevelDocumentMetaclass | ||||||
|  |  | ||||||
|     @apply |  | ||||||
|     def pk(): |     def pk(): | ||||||
|         """Primary key alias |         """Primary key alias | ||||||
|         """ |         """ | ||||||
| @@ -102,6 +120,7 @@ class Document(BaseDocument): | |||||||
|         def fset(self, value): |         def fset(self, value): | ||||||
|             return setattr(self, self._meta['id_field'], value) |             return setattr(self, self._meta['id_field'], value) | ||||||
|         return property(fget, fset) |         return property(fget, fset) | ||||||
|  |     pk = pk() | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _get_db(cls): |     def _get_db(cls): | ||||||
| @@ -127,8 +146,9 @@ class Document(BaseDocument): | |||||||
|                     options = cls._collection.options() |                     options = cls._collection.options() | ||||||
|                     if options.get('max') != max_documents or \ |                     if options.get('max') != max_documents or \ | ||||||
|                        options.get('size') != max_size: |                        options.get('size') != max_size: | ||||||
|                         msg = ('Cannot create collection "%s" as a capped ' |                         msg = (('Cannot create collection "%s" as a capped ' | ||||||
|                                'collection as it already exists') % cls._collection |                                'collection as it already exists') | ||||||
|  |                                 % cls._collection) | ||||||
|                         raise InvalidCollectionError(msg) |                         raise InvalidCollectionError(msg) | ||||||
|                 else: |                 else: | ||||||
|                     # Create the collection as a capped collection |                     # Create the collection as a capped collection | ||||||
| @@ -142,8 +162,9 @@ class Document(BaseDocument): | |||||||
|                 cls._collection = db[collection_name] |                 cls._collection = db[collection_name] | ||||||
|         return cls._collection |         return cls._collection | ||||||
|  |  | ||||||
|     def save(self, safe=True, force_insert=False, validate=True, write_options=None, |     def save(self, safe=True, force_insert=False, validate=True, | ||||||
|             cascade=None, cascade_kwargs=None, _refs=None): |              write_options=None,  cascade=None, cascade_kwargs=None, | ||||||
|  |              _refs=None): | ||||||
|         """Save the :class:`~mongoengine.Document` to the database. If the |         """Save the :class:`~mongoengine.Document` to the database. If the | ||||||
|         document already exists, it will be updated, otherwise it will be |         document already exists, it will be updated, otherwise it will be | ||||||
|         created. |         created. | ||||||
| @@ -156,27 +177,30 @@ class Document(BaseDocument): | |||||||
|             updates of existing documents |             updates of existing documents | ||||||
|         :param validate: validates the document; set to ``False`` to skip. |         :param validate: validates the document; set to ``False`` to skip. | ||||||
|         :param write_options: Extra keyword arguments are passed down to |         :param write_options: Extra keyword arguments are passed down to | ||||||
|                 :meth:`~pymongo.collection.Collection.save` OR |             :meth:`~pymongo.collection.Collection.save` OR | ||||||
|                 :meth:`~pymongo.collection.Collection.insert` |             :meth:`~pymongo.collection.Collection.insert` | ||||||
|                 which will be used as options for the resultant ``getLastError`` command. |             which will be used as options for the resultant | ||||||
|                 For example, ``save(..., write_options={w: 2, fsync: True}, ...)`` will |             ``getLastError`` command.  For example, | ||||||
|                 wait until at least two servers have recorded the write and will force an |             ``save(..., write_options={w: 2, fsync: True}, ...)`` will | ||||||
|                 fsync on each server being written to. |             wait until at least two servers have recorded the write and | ||||||
|         :param cascade: Sets the flag for cascading saves.  You can set a default by setting |             will force an fsync on the primary server. | ||||||
|             "cascade" in the document __meta__ |         :param cascade: Sets the flag for cascading saves.  You can set a | ||||||
|         :param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves |             default by setting "cascade" in the document __meta__ | ||||||
|  |         :param cascade_kwargs: optional kwargs dictionary to be passed throw | ||||||
|  |             to cascading saves | ||||||
|         :param _refs: A list of processed references used in cascading saves |         :param _refs: A list of processed references used in cascading saves | ||||||
|  |  | ||||||
|         .. versionchanged:: 0.5 |         .. versionchanged:: 0.5 | ||||||
|             In existing documents it only saves changed fields using set / unset |             In existing documents it only saves changed fields using | ||||||
|             Saves are cascaded and any :class:`~bson.dbref.DBRef` objects |             set / unset.  Saves are cascaded and any | ||||||
|             that have changes are saved as well. |             :class:`~bson.dbref.DBRef` objects that have changes are | ||||||
|  |             saved as well. | ||||||
|         .. versionchanged:: 0.6 |         .. versionchanged:: 0.6 | ||||||
|             Cascade saves are optional = defaults to True, if you want fine grain |             Cascade saves are optional = defaults to True, if you want | ||||||
|             control then you can turn off using document meta['cascade'] = False |             fine grain control then you can turn off using document | ||||||
|             Also you can pass different kwargs to the cascade save using cascade_kwargs |             meta['cascade'] = False  Also you can pass different kwargs to | ||||||
|             which overwrites the existing kwargs with custom values |             the cascade save using cascade_kwargs which overwrites the | ||||||
|  |             existing kwargs with custom values | ||||||
|         """ |         """ | ||||||
|         signals.pre_save.send(self.__class__, document=self) |         signals.pre_save.send(self.__class__, document=self) | ||||||
|  |  | ||||||
| @@ -194,13 +218,14 @@ class Document(BaseDocument): | |||||||
|             collection = self.__class__.objects._collection |             collection = self.__class__.objects._collection | ||||||
|             if created: |             if created: | ||||||
|                 if force_insert: |                 if force_insert: | ||||||
|                     object_id = collection.insert(doc, safe=safe, **write_options) |                     object_id = collection.insert(doc, safe=safe, | ||||||
|  |                                                   **write_options) | ||||||
|                 else: |                 else: | ||||||
|                     object_id = collection.save(doc, safe=safe, **write_options) |                     object_id = collection.save(doc, safe=safe, | ||||||
|  |                                                 **write_options) | ||||||
|             else: |             else: | ||||||
|                 object_id = doc['_id'] |                 object_id = doc['_id'] | ||||||
|                 updates, removals = self._delta() |                 updates, removals = self._delta() | ||||||
|  |  | ||||||
|                 # Need to add shard key to query, or you get an error |                 # Need to add shard key to query, or you get an error | ||||||
|                 select_dict = {'_id': object_id} |                 select_dict = {'_id': object_id} | ||||||
|                 shard_key = self.__class__._meta.get('shard_key', tuple()) |                 shard_key = self.__class__._meta.get('shard_key', tuple()) | ||||||
| @@ -210,11 +235,15 @@ class Document(BaseDocument): | |||||||
|  |  | ||||||
|                 upsert = self._created |                 upsert = self._created | ||||||
|                 if updates: |                 if updates: | ||||||
|                     collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options) |                     collection.update(select_dict, {"$set": updates}, | ||||||
|  |                         upsert=upsert, safe=safe, **write_options) | ||||||
|                 if removals: |                 if removals: | ||||||
|                     collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options) |                     collection.update(select_dict, {"$unset": removals}, | ||||||
|  |                         upsert=upsert, safe=safe, **write_options) | ||||||
|  |  | ||||||
|             cascade = self._meta.get('cascade', True) if cascade is None else cascade |             warn_cascade = not cascade and 'cascade' not in self._meta | ||||||
|  |             cascade = (self._meta.get('cascade', True) | ||||||
|  |                        if cascade is None else cascade) | ||||||
|             if cascade: |             if cascade: | ||||||
|                 kwargs = { |                 kwargs = { | ||||||
|                     "safe": safe, |                     "safe": safe, | ||||||
| @@ -226,40 +255,64 @@ class Document(BaseDocument): | |||||||
|                 if cascade_kwargs:  # Allow granular control over cascades |                 if cascade_kwargs:  # Allow granular control over cascades | ||||||
|                     kwargs.update(cascade_kwargs) |                     kwargs.update(cascade_kwargs) | ||||||
|                 kwargs['_refs'] = _refs |                 kwargs['_refs'] = _refs | ||||||
|                 #self._changed_fields = [] |                 self.cascade_save(warn_cascade=warn_cascade, **kwargs) | ||||||
|                 self.cascade_save(**kwargs) |  | ||||||
|  |  | ||||||
|         except pymongo.errors.OperationFailure, err: |         except pymongo.errors.OperationFailure, err: | ||||||
|             message = 'Could not save document (%s)' |             message = 'Could not save document (%s)' | ||||||
|             if u'duplicate key' in unicode(err): |             if re.match('^E1100[01] duplicate key', unicode(err)): | ||||||
|  |                 # E11000 - duplicate key error index | ||||||
|  |                 # E11001 - duplicate key on update | ||||||
|                 message = u'Tried to save duplicate unique keys (%s)' |                 message = u'Tried to save duplicate unique keys (%s)' | ||||||
|  |                 raise NotUniqueError(message % unicode(err)) | ||||||
|             raise OperationError(message % unicode(err)) |             raise OperationError(message % unicode(err)) | ||||||
|         id_field = self._meta['id_field'] |         id_field = self._meta['id_field'] | ||||||
|         self[id_field] = self._fields[id_field].to_python(object_id) |         if id_field not in self._meta.get('shard_key', []): | ||||||
|  |             self[id_field] = self._fields[id_field].to_python(object_id) | ||||||
|  |  | ||||||
|         self._changed_fields = [] |         self._changed_fields = [] | ||||||
|         self._created = False |         self._created = False | ||||||
|         signals.post_save.send(self.__class__, document=self, created=created) |         signals.post_save.send(self.__class__, document=self, created=created) | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def cascade_save(self, *args, **kwargs): |     def cascade_save(self, warn_cascade=None, *args, **kwargs): | ||||||
|         """Recursively saves any references / generic references on an object""" |         """Recursively saves any references / | ||||||
|         from fields import ReferenceField, GenericReferenceField |            generic references on an objects""" | ||||||
|  |         import fields | ||||||
|         _refs = kwargs.get('_refs', []) or [] |         _refs = kwargs.get('_refs', []) or [] | ||||||
|  |  | ||||||
|         for name, cls in self._fields.items(): |         for name, cls in self._fields.items(): | ||||||
|             if not isinstance(cls, (ReferenceField, GenericReferenceField)): |             if not isinstance(cls, (fields.ReferenceField, | ||||||
|  |                                     fields.GenericReferenceField)): | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             ref = getattr(self, name) |             ref = getattr(self, name) | ||||||
|             if not ref: |             if not ref or isinstance(ref, DBRef): | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|  |             if not getattr(ref, '_changed_fields', True): | ||||||
|  |                 continue | ||||||
|  |  | ||||||
|             ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) |             ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) | ||||||
|             if ref and ref_id not in _refs: |             if ref and ref_id not in _refs: | ||||||
|  |                 if warn_cascade: | ||||||
|  |                     msg = ("Cascading saves will default to off in 0.8, " | ||||||
|  |                           "please  explicitly set `.save(cascade=True)`") | ||||||
|  |                     warnings.warn(msg, FutureWarning) | ||||||
|                 _refs.append(ref_id) |                 _refs.append(ref_id) | ||||||
|                 kwargs["_refs"] = _refs |                 kwargs["_refs"] = _refs | ||||||
|                 ref.save(**kwargs) |                 ref.save(**kwargs) | ||||||
|                 ref._changed_fields = [] |                 ref._changed_fields = [] | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def _object_key(self): | ||||||
|  |         """Dict to identify object in collection | ||||||
|  |         """ | ||||||
|  |         select_dict = {'pk': self.pk} | ||||||
|  |         shard_key = self.__class__._meta.get('shard_key', tuple()) | ||||||
|  |         for k in shard_key: | ||||||
|  |             select_dict[k] = getattr(self, k) | ||||||
|  |         return select_dict | ||||||
|  |  | ||||||
|     def update(self, **kwargs): |     def update(self, **kwargs): | ||||||
|         """Performs an update on the :class:`~mongoengine.Document` |         """Performs an update on the :class:`~mongoengine.Document` | ||||||
|         A convenience wrapper to :meth:`~mongoengine.QuerySet.update`. |         A convenience wrapper to :meth:`~mongoengine.QuerySet.update`. | ||||||
| @@ -271,11 +324,7 @@ class Document(BaseDocument): | |||||||
|             raise OperationError('attempt to update a document not yet saved') |             raise OperationError('attempt to update a document not yet saved') | ||||||
|  |  | ||||||
|         # Need to add shard key to query, or you get an error |         # Need to add shard key to query, or you get an error | ||||||
|         select_dict = {'pk': self.pk} |         return self.__class__.objects(**self._object_key).update_one(**kwargs) | ||||||
|         shard_key = self.__class__._meta.get('shard_key', tuple()) |  | ||||||
|         for k in shard_key: |  | ||||||
|             select_dict[k] = getattr(self, k) |  | ||||||
|         return self.__class__.objects(**select_dict).update_one(**kwargs) |  | ||||||
|  |  | ||||||
|     def delete(self, safe=False): |     def delete(self, safe=False): | ||||||
|         """Delete the :class:`~mongoengine.Document` from the database. This |         """Delete the :class:`~mongoengine.Document` from the database. This | ||||||
| @@ -286,7 +335,7 @@ class Document(BaseDocument): | |||||||
|         signals.pre_delete.send(self.__class__, document=self) |         signals.pre_delete.send(self.__class__, document=self) | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             self.__class__.objects(pk=self.pk).delete(safe=safe) |             self.__class__.objects(**self._object_key).delete(safe=safe) | ||||||
|         except pymongo.errors.OperationFailure, err: |         except pymongo.errors.OperationFailure, err: | ||||||
|             message = u'Could not delete document (%s)' % err.message |             message = u'Could not delete document (%s)' % err.message | ||||||
|             raise OperationError(message) |             raise OperationError(message) | ||||||
| @@ -299,8 +348,8 @@ class Document(BaseDocument): | |||||||
|  |  | ||||||
|         .. versionadded:: 0.5 |         .. versionadded:: 0.5 | ||||||
|         """ |         """ | ||||||
|         from dereference import DeReference |         import dereference | ||||||
|         self._data = DeReference()(self._data, max_depth) |         self._data = dereference.DeReference()(self._data, max_depth) | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def reload(self, max_depth=1): |     def reload(self, max_depth=1): | ||||||
| @@ -312,7 +361,12 @@ class Document(BaseDocument): | |||||||
|         id_field = self._meta['id_field'] |         id_field = self._meta['id_field'] | ||||||
|         obj = self.__class__.objects( |         obj = self.__class__.objects( | ||||||
|                 **{id_field: self[id_field]} |                 **{id_field: self[id_field]} | ||||||
|               ).first().select_related(max_depth=max_depth) |               ).limit(1).select_related(max_depth=max_depth) | ||||||
|  |         if obj: | ||||||
|  |             obj = obj[0] | ||||||
|  |         else: | ||||||
|  |             msg = "Reloaded document has been deleted" | ||||||
|  |             raise OperationError(msg) | ||||||
|         for field in self._fields: |         for field in self._fields: | ||||||
|             setattr(self, field, self._reload(field, obj[field])) |             setattr(self, field, self._reload(field, obj[field])) | ||||||
|         if self._dynamic: |         if self._dynamic: | ||||||
| @@ -348,17 +402,18 @@ class Document(BaseDocument): | |||||||
|         """This method registers the delete rules to apply when removing this |         """This method registers the delete rules to apply when removing this | ||||||
|         object. |         object. | ||||||
|         """ |         """ | ||||||
|         cls._meta['delete_rules'][(document_cls, field_name)] = rule |         delete_rules = cls._meta.get('delete_rules') or {} | ||||||
|  |         delete_rules[(document_cls, field_name)] = rule | ||||||
|  |         cls._meta['delete_rules'] = delete_rules | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def drop_collection(cls): |     def drop_collection(cls): | ||||||
|         """Drops the entire collection associated with this |         """Drops the entire collection associated with this | ||||||
|         :class:`~mongoengine.Document` type from the database. |         :class:`~mongoengine.Document` type from the database. | ||||||
|         """ |         """ | ||||||
|         from mongoengine.queryset import QuerySet |  | ||||||
|         db = cls._get_db() |         db = cls._get_db() | ||||||
|         db.drop_collection(cls._get_collection_name()) |         db.drop_collection(cls._get_collection_name()) | ||||||
|         QuerySet._reset_already_indexed(cls) |         queryset.QuerySet._reset_already_indexed(cls) | ||||||
|  |  | ||||||
|  |  | ||||||
| class DynamicDocument(Document): | class DynamicDocument(Document): | ||||||
| @@ -370,11 +425,16 @@ class DynamicDocument(Document): | |||||||
|     :class:`~mongoengine.DynamicField` and data can be attributed to that |     :class:`~mongoengine.DynamicField` and data can be attributed to that | ||||||
|     field. |     field. | ||||||
|  |  | ||||||
|     ..note:: |     .. note:: | ||||||
|  |  | ||||||
|         There is one caveat on Dynamic Documents: fields cannot start with `_` |         There is one caveat on Dynamic Documents: fields cannot start with `_` | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|  |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|  |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|  |     my_metaclass  = TopLevelDocumentMetaclass | ||||||
|     __metaclass__ = TopLevelDocumentMetaclass |     __metaclass__ = TopLevelDocumentMetaclass | ||||||
|  |  | ||||||
|     _dynamic = True |     _dynamic = True | ||||||
|  |  | ||||||
|     def __delattr__(self, *args, **kwargs): |     def __delattr__(self, *args, **kwargs): | ||||||
| @@ -393,7 +453,11 @@ class DynamicEmbeddedDocument(EmbeddedDocument): | |||||||
|     information about dynamic documents. |     information about dynamic documents. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|  |     # The __metaclass__ attribute is removed by 2to3 when running with Python3 | ||||||
|  |     # my_metaclass is defined so that metaclass can be queried in Python 2 & 3 | ||||||
|  |     my_metaclass  = DocumentMetaclass | ||||||
|     __metaclass__ = DocumentMetaclass |     __metaclass__ = DocumentMetaclass | ||||||
|  |  | ||||||
|     _dynamic = True |     _dynamic = True | ||||||
|  |  | ||||||
|     def __delattr__(self, *args, **kwargs): |     def __delattr__(self, *args, **kwargs): | ||||||
|   | |||||||
| @@ -1,18 +1,24 @@ | |||||||
| import datetime | import datetime | ||||||
| import time |  | ||||||
| import decimal | import decimal | ||||||
| import gridfs | import itertools | ||||||
| import re | import re | ||||||
|  | import time | ||||||
|  | import urllib2 | ||||||
|  | import urlparse | ||||||
| import uuid | import uuid | ||||||
|  | import warnings | ||||||
|  | from operator import itemgetter | ||||||
|  |  | ||||||
|  | import gridfs | ||||||
| from bson import Binary, DBRef, SON, ObjectId | from bson import Binary, DBRef, SON, ObjectId | ||||||
|  |  | ||||||
|  | from mongoengine.python_support import (PY3, bin_type, txt_type, | ||||||
|  |                                         str_types, StringIO) | ||||||
| from base import (BaseField, ComplexBaseField, ObjectIdField, | from base import (BaseField, ComplexBaseField, ObjectIdField, | ||||||
|                   ValidationError, get_document, BaseDocument) |                   ValidationError, get_document, BaseDocument) | ||||||
| from queryset import DO_NOTHING, QuerySet | from queryset import DO_NOTHING, QuerySet | ||||||
| from document import Document, EmbeddedDocument | from document import Document, EmbeddedDocument | ||||||
| from connection import get_db, DEFAULT_CONNECTION_NAME | from connection import get_db, DEFAULT_CONNECTION_NAME | ||||||
| from operator import itemgetter |  | ||||||
|  |  | ||||||
|  |  | ||||||
| try: | try: | ||||||
| @@ -21,12 +27,6 @@ except ImportError: | |||||||
|     Image = None |     Image = None | ||||||
|     ImageOps = None |     ImageOps = None | ||||||
|  |  | ||||||
| try: |  | ||||||
|     from cStringIO import StringIO |  | ||||||
| except ImportError: |  | ||||||
|     from StringIO import StringIO |  | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', | __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', | ||||||
|            'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField', |            'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField', | ||||||
|            'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', |            'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', | ||||||
| @@ -51,8 +51,11 @@ class StringField(BaseField): | |||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         if isinstance(value, unicode): |         if isinstance(value, unicode): | ||||||
|             return value |             return value | ||||||
|         else: |         try: | ||||||
|             return value.decode('utf-8') |             value = value.decode('utf-8') | ||||||
|  |         except: | ||||||
|  |             pass | ||||||
|  |         return value | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         if not isinstance(value, basestring): |         if not isinstance(value, basestring): | ||||||
| @@ -100,25 +103,30 @@ class URLField(StringField): | |||||||
|     .. versionadded:: 0.3 |     .. versionadded:: 0.3 | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     URL_REGEX = re.compile( |     _URL_REGEX = re.compile( | ||||||
|         r'^https?://' |         r'^(?:http|ftp)s?://' # http:// or https:// | ||||||
|         r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|' |         r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain... | ||||||
|         r'localhost|' |         r'localhost|' #localhost... | ||||||
|         r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' |         r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip | ||||||
|         r'(?::\d+)?' |         r'(?::\d+)?' # optional port | ||||||
|         r'(?:/?|[/?]\S+)$', re.IGNORECASE |         r'(?:/?|[/?]\S+)$', re.IGNORECASE) | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     def __init__(self, verify_exists=False, **kwargs): |     def __init__(self, verify_exists=False, url_regex=None, **kwargs): | ||||||
|         self.verify_exists = verify_exists |         self.verify_exists = verify_exists | ||||||
|  |         self.url_regex = url_regex or self._URL_REGEX | ||||||
|         super(URLField, self).__init__(**kwargs) |         super(URLField, self).__init__(**kwargs) | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         if not URLField.URL_REGEX.match(value): |         if not self.url_regex.match(value): | ||||||
|             self.error('Invalid URL: %s' % value) |             self.error('Invalid URL: %s' % value) | ||||||
|  |             return | ||||||
|  |  | ||||||
|         if self.verify_exists: |         if self.verify_exists: | ||||||
|             import urllib2 |             warnings.warn( | ||||||
|  |                 "The URLField verify_exists argument has intractable security " | ||||||
|  |                 "and performance issues. Accordingly, it has been deprecated.", | ||||||
|  |             DeprecationWarning | ||||||
|  |             ) | ||||||
|             try: |             try: | ||||||
|                 request = urllib2.Request(value) |                 request = urllib2.Request(value) | ||||||
|                 urllib2.urlopen(request) |                 urllib2.urlopen(request) | ||||||
| @@ -152,7 +160,11 @@ class IntField(BaseField): | |||||||
|         super(IntField, self).__init__(**kwargs) |         super(IntField, self).__init__(**kwargs) | ||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         return int(value) |         try: | ||||||
|  |             value = int(value) | ||||||
|  |         except ValueError: | ||||||
|  |             pass | ||||||
|  |         return value | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         try: |         try: | ||||||
| @@ -167,6 +179,9 @@ class IntField(BaseField): | |||||||
|             self.error('Integer value is too large') |             self.error('Integer value is too large') | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
|  |         if value is None: | ||||||
|  |             return value | ||||||
|  |  | ||||||
|         return int(value) |         return int(value) | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -179,7 +194,11 @@ class FloatField(BaseField): | |||||||
|         super(FloatField, self).__init__(**kwargs) |         super(FloatField, self).__init__(**kwargs) | ||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         return float(value) |         try: | ||||||
|  |             value = float(value) | ||||||
|  |         except ValueError: | ||||||
|  |             pass | ||||||
|  |         return value | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         if isinstance(value, int): |         if isinstance(value, int): | ||||||
| @@ -194,6 +213,9 @@ class FloatField(BaseField): | |||||||
|             self.error('Float value is too large') |             self.error('Float value is too large') | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
|  |         if value is None: | ||||||
|  |             return value | ||||||
|  |  | ||||||
|         return float(value) |         return float(value) | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -208,9 +230,14 @@ class DecimalField(BaseField): | |||||||
|         super(DecimalField, self).__init__(**kwargs) |         super(DecimalField, self).__init__(**kwargs) | ||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|  |         original_value = value | ||||||
|         if not isinstance(value, basestring): |         if not isinstance(value, basestring): | ||||||
|             value = unicode(value) |             value = unicode(value) | ||||||
|         return decimal.Decimal(value) |         try: | ||||||
|  |             value = decimal.Decimal(value) | ||||||
|  |         except ValueError: | ||||||
|  |             return original_value | ||||||
|  |         return value | ||||||
|  |  | ||||||
|     def to_mongo(self, value): |     def to_mongo(self, value): | ||||||
|         return unicode(value) |         return unicode(value) | ||||||
| @@ -238,7 +265,11 @@ class BooleanField(BaseField): | |||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         return bool(value) |         try: | ||||||
|  |             value = bool(value) | ||||||
|  |         except ValueError: | ||||||
|  |             pass | ||||||
|  |         return value | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         if not isinstance(value, bool): |         if not isinstance(value, bool): | ||||||
| @@ -369,6 +400,8 @@ class ComplexDateTimeField(StringField): | |||||||
|         data = super(ComplexDateTimeField, self).__get__(instance, owner) |         data = super(ComplexDateTimeField, self).__get__(instance, owner) | ||||||
|         if data == None: |         if data == None: | ||||||
|             return datetime.datetime.now() |             return datetime.datetime.now() | ||||||
|  |         if isinstance(data, datetime.datetime): | ||||||
|  |             return data | ||||||
|         return self._convert_from_string(data) |         return self._convert_from_string(data) | ||||||
|  |  | ||||||
|     def __set__(self, instance, value): |     def __set__(self, instance, value): | ||||||
| @@ -381,7 +414,11 @@ class ComplexDateTimeField(StringField): | |||||||
|                        'ComplexDateTimeField') |                        'ComplexDateTimeField') | ||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         return self._convert_from_string(value) |         original_value = value | ||||||
|  |         try: | ||||||
|  |             return self._convert_from_string(value) | ||||||
|  |         except: | ||||||
|  |             return original_value | ||||||
|  |  | ||||||
|     def to_mongo(self, value): |     def to_mongo(self, value): | ||||||
|         return self._convert_from_datetime(value) |         return self._convert_from_datetime(value) | ||||||
| @@ -445,8 +482,9 @@ class GenericEmbeddedDocumentField(BaseField): | |||||||
|  |  | ||||||
|     Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`. |     Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`. | ||||||
|  |  | ||||||
|     ..note :: You can use the choices param to limit the acceptable |     .. note :: | ||||||
|     EmbeddedDocument types |         You can use the choices param to limit the acceptable | ||||||
|  |         EmbeddedDocument types | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
| @@ -477,7 +515,10 @@ class GenericEmbeddedDocumentField(BaseField): | |||||||
|  |  | ||||||
|  |  | ||||||
| class DynamicField(BaseField): | class DynamicField(BaseField): | ||||||
|     """Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data""" |     """A truly dynamic field type capable of handling different and varying | ||||||
|  |     types of data. | ||||||
|  |  | ||||||
|  |     Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data""" | ||||||
|  |  | ||||||
|     def to_mongo(self, value): |     def to_mongo(self, value): | ||||||
|         """Convert a Python type to a MongoDBcompatible type. |         """Convert a Python type to a MongoDBcompatible type. | ||||||
| @@ -521,6 +562,8 @@ class ListField(ComplexBaseField): | |||||||
|     """A list field that wraps a standard field, allowing multiple instances |     """A list field that wraps a standard field, allowing multiple instances | ||||||
|     of the field to be used as a list in the database. |     of the field to be used as a list in the database. | ||||||
|  |  | ||||||
|  |     If using with ReferenceFields see: :ref:`one-to-many-with-listfields` | ||||||
|  |  | ||||||
|     .. note:: |     .. note:: | ||||||
|         Required means it cannot be empty - as the default for ListFields is [] |         Required means it cannot be empty - as the default for ListFields is [] | ||||||
|     """ |     """ | ||||||
| @@ -659,7 +702,8 @@ class ReferenceField(BaseField): | |||||||
|       * NULLIFY     - Updates the reference to null. |       * NULLIFY     - Updates the reference to null. | ||||||
|       * CASCADE     - Deletes the documents associated with the reference. |       * CASCADE     - Deletes the documents associated with the reference. | ||||||
|       * DENY        - Prevent the deletion of the reference object. |       * DENY        - Prevent the deletion of the reference object. | ||||||
|       * PULL        - Pull the reference from a :class:`~mongoengine.ListField` of references |       * PULL        - Pull the reference from a :class:`~mongoengine.ListField` | ||||||
|  |                       of references | ||||||
|  |  | ||||||
|     Alternative syntax for registering delete rules (useful when implementing |     Alternative syntax for registering delete rules (useful when implementing | ||||||
|     bi-directional delete rules) |     bi-directional delete rules) | ||||||
| @@ -672,12 +716,19 @@ class ReferenceField(BaseField): | |||||||
|  |  | ||||||
|         Bar.register_delete_rule(Foo, 'bar', NULLIFY) |         Bar.register_delete_rule(Foo, 'bar', NULLIFY) | ||||||
|  |  | ||||||
|  |     .. note :: | ||||||
|  |         `reverse_delete_rules` do not trigger pre / post delete signals to be | ||||||
|  |         triggered. | ||||||
|  |  | ||||||
|     .. versionchanged:: 0.5 added `reverse_delete_rule` |     .. versionchanged:: 0.5 added `reverse_delete_rule` | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def __init__(self, document_type, reverse_delete_rule=DO_NOTHING, **kwargs): |     def __init__(self, document_type, dbref=None, | ||||||
|  |                  reverse_delete_rule=DO_NOTHING, **kwargs): | ||||||
|         """Initialises the Reference Field. |         """Initialises the Reference Field. | ||||||
|  |  | ||||||
|  |         :param dbref:  Store the reference as :class:`~pymongo.dbref.DBRef` | ||||||
|  |           or as the :class:`~pymongo.objectid.ObjectId`.id . | ||||||
|         :param reverse_delete_rule: Determines what to do when the referring |         :param reverse_delete_rule: Determines what to do when the referring | ||||||
|           object is deleted |           object is deleted | ||||||
|         """ |         """ | ||||||
| @@ -685,6 +736,13 @@ class ReferenceField(BaseField): | |||||||
|             if not issubclass(document_type, (Document, basestring)): |             if not issubclass(document_type, (Document, basestring)): | ||||||
|                 self.error('Argument to ReferenceField constructor must be a ' |                 self.error('Argument to ReferenceField constructor must be a ' | ||||||
|                            'document class or a string') |                            'document class or a string') | ||||||
|  |  | ||||||
|  |         if dbref is None: | ||||||
|  |             msg = ("ReferenceFields will default to using ObjectId " | ||||||
|  |                    " strings in 0.8, set DBRef=True if this isn't desired") | ||||||
|  |             warnings.warn(msg, FutureWarning) | ||||||
|  |  | ||||||
|  |         self.dbref = dbref if dbref is not None else True  # To change in 0.8 | ||||||
|         self.document_type_obj = document_type |         self.document_type_obj = document_type | ||||||
|         self.reverse_delete_rule = reverse_delete_rule |         self.reverse_delete_rule = reverse_delete_rule | ||||||
|         super(ReferenceField, self).__init__(**kwargs) |         super(ReferenceField, self).__init__(**kwargs) | ||||||
| @@ -707,8 +765,9 @@ class ReferenceField(BaseField): | |||||||
|  |  | ||||||
|         # Get value from document instance if available |         # Get value from document instance if available | ||||||
|         value = instance._data.get(self.name) |         value = instance._data.get(self.name) | ||||||
|  |  | ||||||
|         # Dereference DBRefs |         # Dereference DBRefs | ||||||
|         if isinstance(value, (DBRef)): |         if isinstance(value, DBRef): | ||||||
|             value = self.document_type._get_db().dereference(value) |             value = self.document_type._get_db().dereference(value) | ||||||
|             if value is not None: |             if value is not None: | ||||||
|                 instance._data[self.name] = self.document_type._from_son(value) |                 instance._data[self.name] = self.document_type._from_son(value) | ||||||
| @@ -717,6 +776,10 @@ class ReferenceField(BaseField): | |||||||
|  |  | ||||||
|     def to_mongo(self, document): |     def to_mongo(self, document): | ||||||
|         if isinstance(document, DBRef): |         if isinstance(document, DBRef): | ||||||
|  |             if not self.dbref: | ||||||
|  |                 return DBRef.id | ||||||
|  |             return document | ||||||
|  |         elif not self.dbref and isinstance(document, basestring): | ||||||
|             return document |             return document | ||||||
|  |  | ||||||
|         id_field_name = self.document_type._meta['id_field'] |         id_field_name = self.document_type._meta['id_field'] | ||||||
| @@ -724,7 +787,7 @@ class ReferenceField(BaseField): | |||||||
|  |  | ||||||
|         if isinstance(document, Document): |         if isinstance(document, Document): | ||||||
|             # We need the id from the saved object to create the DBRef |             # We need the id from the saved object to create the DBRef | ||||||
|             id_ = document.id |             id_ = document.pk | ||||||
|             if id_ is None: |             if id_ is None: | ||||||
|                 self.error('You can only reference documents once they have' |                 self.error('You can only reference documents once they have' | ||||||
|                            ' been saved to the database') |                            ' been saved to the database') | ||||||
| @@ -732,18 +795,30 @@ class ReferenceField(BaseField): | |||||||
|             id_ = document |             id_ = document | ||||||
|  |  | ||||||
|         id_ = id_field.to_mongo(id_) |         id_ = id_field.to_mongo(id_) | ||||||
|         collection = self.document_type._get_collection_name() |         if self.dbref: | ||||||
|         return DBRef(collection, id_) |             collection = self.document_type._get_collection_name() | ||||||
|  |             return DBRef(collection, id_) | ||||||
|  |  | ||||||
|  |         return id_ | ||||||
|  |  | ||||||
|  |     def to_python(self, value): | ||||||
|  |         """Convert a MongoDB-compatible type to a Python type. | ||||||
|  |         """ | ||||||
|  |         if (not self.dbref and | ||||||
|  |             not isinstance(value, (DBRef, Document, EmbeddedDocument))): | ||||||
|  |             collection = self.document_type._get_collection_name() | ||||||
|  |             value = DBRef(collection, self.document_type.id.to_python(value)) | ||||||
|  |         return value | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
|         if value is None: |         if value is None: | ||||||
|             return None |             return None | ||||||
|  |  | ||||||
|         return self.to_mongo(value) |         return self.to_mongo(value) | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|  |  | ||||||
|         if not isinstance(value, (self.document_type, DBRef)): |         if not isinstance(value, (self.document_type, DBRef)): | ||||||
|             self.error('A ReferenceField only accepts DBRef') |             self.error("A ReferenceField only accepts DBRef or documents") | ||||||
|  |  | ||||||
|         if isinstance(value, Document) and value.id is None: |         if isinstance(value, Document) and value.id is None: | ||||||
|             self.error('You can only reference documents once they have been ' |             self.error('You can only reference documents once they have been ' | ||||||
| @@ -757,10 +832,12 @@ class GenericReferenceField(BaseField): | |||||||
|     """A reference to *any* :class:`~mongoengine.document.Document` subclass |     """A reference to *any* :class:`~mongoengine.document.Document` subclass | ||||||
|     that will be automatically dereferenced on access (lazily). |     that will be automatically dereferenced on access (lazily). | ||||||
|  |  | ||||||
|     ..note ::  Any documents used as a generic reference must be registered in the |     .. note :: | ||||||
|     document registry.  Importing the model will automatically register it. |         * Any documents used as a generic reference must be registered in the | ||||||
|  |           document registry.  Importing the model will automatically register | ||||||
|  |           it. | ||||||
|  |  | ||||||
|     ..note :: You can use the choices param to limit the acceptable Document types |         * You can use the choices param to limit the acceptable Document types | ||||||
|  |  | ||||||
|     .. versionadded:: 0.3 |     .. versionadded:: 0.3 | ||||||
|     """ |     """ | ||||||
| @@ -831,16 +908,20 @@ class BinaryField(BaseField): | |||||||
|         self.max_bytes = max_bytes |         self.max_bytes = max_bytes | ||||||
|         super(BinaryField, self).__init__(**kwargs) |         super(BinaryField, self).__init__(**kwargs) | ||||||
|  |  | ||||||
|  |     def __set__(self, instance, value): | ||||||
|  |         """Handle bytearrays in python 3.1""" | ||||||
|  |         if PY3 and isinstance(value, bytearray): | ||||||
|  |             value = bin_type(value) | ||||||
|  |         return super(BinaryField, self).__set__(instance, value) | ||||||
|  |  | ||||||
|     def to_mongo(self, value): |     def to_mongo(self, value): | ||||||
|         return Binary(value) |         return Binary(value) | ||||||
|  |  | ||||||
|     def to_python(self, value): |  | ||||||
|         # Returns str not unicode as this is binary data |  | ||||||
|         return str(value) |  | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         if not isinstance(value, str): |         if not isinstance(value, (bin_type, txt_type, Binary)): | ||||||
|             self.error('BinaryField only accepts string values') |             self.error("BinaryField only accepts instances of " | ||||||
|  |                        "(%s, %s, Binary)" % ( | ||||||
|  |                         bin_type.__name__, txt_type.__name__)) | ||||||
|  |  | ||||||
|         if self.max_bytes is not None and len(value) > self.max_bytes: |         if self.max_bytes is not None and len(value) > self.max_bytes: | ||||||
|             self.error('Binary value is too long') |             self.error('Binary value is too long') | ||||||
| @@ -896,9 +977,13 @@ class GridFSProxy(object): | |||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
|         return '<%s: %s>' % (self.__class__.__name__, self.grid_id) |         return '<%s: %s>' % (self.__class__.__name__, self.grid_id) | ||||||
|  |  | ||||||
|     def __cmp__(self, other): |     def __eq__(self, other): | ||||||
|         return cmp((self.grid_id, self.collection_name, self.db_alias), |         if isinstance(other, GridFSProxy): | ||||||
|                    (other.grid_id, other.collection_name, other.db_alias)) |             return  ((self.grid_id == other.grid_id) and | ||||||
|  |                      (self.collection_name == other.collection_name) and | ||||||
|  |                      (self.db_alias == other.db_alias)) | ||||||
|  |         else: | ||||||
|  |             return False | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def fs(self): |     def fs(self): | ||||||
| @@ -1011,7 +1096,8 @@ class FileField(BaseField): | |||||||
|  |  | ||||||
|     def __set__(self, instance, value): |     def __set__(self, instance, value): | ||||||
|         key = self.name |         key = self.name | ||||||
|         if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, str): |         if ((hasattr(value, 'read') and not | ||||||
|  |              isinstance(value, GridFSProxy)) or isinstance(value, str_types)): | ||||||
|             # using "FileField() = file/string" notation |             # using "FileField() = file/string" notation | ||||||
|             grid_file = instance._data.get(self.name) |             grid_file = instance._data.get(self.name) | ||||||
|             # If a file already exists, delete it |             # If a file already exists, delete it | ||||||
| @@ -1067,6 +1153,7 @@ class ImageGridFsProxy(GridFSProxy): | |||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             img = Image.open(file_obj) |             img = Image.open(file_obj) | ||||||
|  |             img_format = img.format | ||||||
|         except: |         except: | ||||||
|             raise ValidationError('Invalid image') |             raise ValidationError('Invalid image') | ||||||
|  |  | ||||||
| @@ -1101,20 +1188,20 @@ class ImageGridFsProxy(GridFSProxy): | |||||||
|  |  | ||||||
|         if thumbnail: |         if thumbnail: | ||||||
|             thumb_id = self._put_thumbnail(thumbnail, |             thumb_id = self._put_thumbnail(thumbnail, | ||||||
|                                           img.format) |                                           img_format) | ||||||
|         else: |         else: | ||||||
|             thumb_id = None |             thumb_id = None | ||||||
|  |  | ||||||
|         w, h = img.size |         w, h = img.size | ||||||
|  |  | ||||||
|         io = StringIO() |         io = StringIO() | ||||||
|         img.save(io, img.format) |         img.save(io, img_format) | ||||||
|         io.seek(0) |         io.seek(0) | ||||||
|  |  | ||||||
|         return super(ImageGridFsProxy, self).put(io, |         return super(ImageGridFsProxy, self).put(io, | ||||||
|                                                  width=w, |                                                  width=w, | ||||||
|                                                  height=h, |                                                  height=h, | ||||||
|                                                  format=img.format, |                                                  format=img_format, | ||||||
|                                                  thumbnail_id=thumb_id, |                                                  thumbnail_id=thumb_id, | ||||||
|                                                  **kwargs) |                                                  **kwargs) | ||||||
|  |  | ||||||
| @@ -1200,11 +1287,15 @@ class ImageField(FileField): | |||||||
|         params_size = ('width', 'height', 'force') |         params_size = ('width', 'height', 'force') | ||||||
|         extra_args = dict(size=size, thumbnail_size=thumbnail_size) |         extra_args = dict(size=size, thumbnail_size=thumbnail_size) | ||||||
|         for att_name, att in extra_args.items(): |         for att_name, att in extra_args.items(): | ||||||
|             if att and (isinstance(att, tuple) or isinstance(att, list)): |             value = None | ||||||
|                 setattr(self, att_name, dict( |             if isinstance(att, (tuple, list)): | ||||||
|                         map(None, params_size, att))) |                 if PY3: | ||||||
|             else: |                     value = dict(itertools.zip_longest(params_size, att, | ||||||
|                 setattr(self, att_name, None) |                                                         fillvalue=None)) | ||||||
|  |                 else: | ||||||
|  |                     value = dict(map(None, params_size, att)) | ||||||
|  |  | ||||||
|  |             setattr(self, att_name, value) | ||||||
|  |  | ||||||
|         super(ImageField, self).__init__( |         super(ImageField, self).__init__( | ||||||
|             collection_name=collection_name, |             collection_name=collection_name, | ||||||
| @@ -1246,18 +1337,19 @@ class SequenceField(IntField): | |||||||
|  |  | ||||||
|     .. versionadded:: 0.5 |     .. versionadded:: 0.5 | ||||||
|     """ |     """ | ||||||
|     def __init__(self, collection_name=None, db_alias = None, *args, **kwargs): |     def __init__(self, collection_name=None, db_alias = None, sequence_name = None, *args, **kwargs): | ||||||
|         self.collection_name = collection_name or 'mongoengine.counters' |         self.collection_name = collection_name or 'mongoengine.counters' | ||||||
|         self.db_alias = db_alias or DEFAULT_CONNECTION_NAME |         self.db_alias = db_alias or DEFAULT_CONNECTION_NAME | ||||||
|  |         self.sequence_name = sequence_name | ||||||
|         return super(SequenceField, self).__init__(*args, **kwargs) |         return super(SequenceField, self).__init__(*args, **kwargs) | ||||||
|  |  | ||||||
|     def generate_new_value(self): |     def generate_new_value(self): | ||||||
|         """ |         """ | ||||||
|         Generate and Increment the counter |         Generate and Increment the counter | ||||||
|         """ |         """ | ||||||
|         sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(), |         sequence_name = self.sequence_name or self.owner_document._get_collection_name() | ||||||
|                                        self.name) |         sequence_id = "%s.%s" % (sequence_name, self.name) | ||||||
|         collection = get_db(alias = self.db_alias )[self.collection_name] |         collection = get_db(alias=self.db_alias)[self.collection_name] | ||||||
|         counter = collection.find_and_modify(query={"_id": sequence_id}, |         counter = collection.find_and_modify(query={"_id": sequence_id}, | ||||||
|                                              update={"$inc": {"next": 1}}, |                                              update={"$inc": {"next": 1}}, | ||||||
|                                              new=True, |                                              new=True, | ||||||
| @@ -1279,7 +1371,7 @@ class SequenceField(IntField): | |||||||
|             instance._data[self.name] = value |             instance._data[self.name] = value | ||||||
|             instance._mark_as_changed(self.name) |             instance._mark_as_changed(self.name) | ||||||
|  |  | ||||||
|         return value |         return int(value) if value else None | ||||||
|  |  | ||||||
|     def __set__(self, instance, value): |     def __set__(self, instance, value): | ||||||
|  |  | ||||||
| @@ -1299,17 +1391,44 @@ class UUIDField(BaseField): | |||||||
|  |  | ||||||
|     .. versionadded:: 0.6 |     .. versionadded:: 0.6 | ||||||
|     """ |     """ | ||||||
|  |     _binary = None | ||||||
|  |  | ||||||
|     def __init__(self, **kwargs): |     def __init__(self, binary=None, **kwargs): | ||||||
|  |         """ | ||||||
|  |         Store UUID data in the database | ||||||
|  |  | ||||||
|  |         :param binary: (optional) boolean store as binary. | ||||||
|  |  | ||||||
|  |         .. versionchanged:: 0.6.19 | ||||||
|  |         """ | ||||||
|  |         if binary is None: | ||||||
|  |             binary = False | ||||||
|  |             msg = ("UUIDFields will soon default to store as binary, please " | ||||||
|  |                   "configure binary=False if you wish to store as a string") | ||||||
|  |             warnings.warn(msg, FutureWarning) | ||||||
|  |         self._binary = binary | ||||||
|         super(UUIDField, self).__init__(**kwargs) |         super(UUIDField, self).__init__(**kwargs) | ||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         if not isinstance(value, basestring): |         if not self._binary: | ||||||
|             value = unicode(value) |             original_value = value | ||||||
|         return uuid.UUID(value) |             try: | ||||||
|  |                 if not isinstance(value, basestring): | ||||||
|  |                     value = unicode(value) | ||||||
|  |                 return uuid.UUID(value) | ||||||
|  |             except: | ||||||
|  |                 return original_value | ||||||
|  |         return value | ||||||
|  |  | ||||||
|     def to_mongo(self, value): |     def to_mongo(self, value): | ||||||
|         return unicode(value) |         if not self._binary: | ||||||
|  |             return unicode(value) | ||||||
|  |         return value | ||||||
|  |  | ||||||
|  |     def prepare_query_value(self, op, value): | ||||||
|  |         if value is None: | ||||||
|  |             return None | ||||||
|  |         return self.to_mongo(value) | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         if not isinstance(value, uuid.UUID): |         if not isinstance(value, uuid.UUID): | ||||||
|   | |||||||
							
								
								
									
										61
									
								
								mongoengine/python_support.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										61
									
								
								mongoengine/python_support.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,61 @@ | |||||||
|  | """Helper functions and types to aid with Python 2.5 - 3 support.""" | ||||||
|  |  | ||||||
|  | import sys | ||||||
|  |  | ||||||
|  | PY3 = sys.version_info[0] == 3 | ||||||
|  | PY25 = sys.version_info[:2] == (2, 5) | ||||||
|  | UNICODE_KWARGS = int(''.join([str(x) for x in sys.version_info[:3]])) > 264 | ||||||
|  |  | ||||||
|  | if PY3: | ||||||
|  |     import codecs | ||||||
|  |     from io import BytesIO as StringIO | ||||||
|  |     # return s converted to binary.  b('test') should be equivalent to b'test' | ||||||
|  |     def b(s): | ||||||
|  |         return codecs.latin_1_encode(s)[0] | ||||||
|  |  | ||||||
|  |     bin_type = bytes | ||||||
|  |     txt_type   = str | ||||||
|  | else: | ||||||
|  |     try: | ||||||
|  |         from cStringIO import StringIO | ||||||
|  |     except ImportError: | ||||||
|  |         from StringIO import StringIO | ||||||
|  |  | ||||||
|  |     # Conversion to binary only necessary in Python 3 | ||||||
|  |     def b(s): | ||||||
|  |         return s | ||||||
|  |  | ||||||
|  |     bin_type = str | ||||||
|  |     txt_type = unicode | ||||||
|  |  | ||||||
|  | str_types = (bin_type, txt_type) | ||||||
|  |  | ||||||
|  | if PY25: | ||||||
|  |     def product(*args, **kwds): | ||||||
|  |         pools = map(tuple, args) * kwds.get('repeat', 1) | ||||||
|  |         result = [[]] | ||||||
|  |         for pool in pools: | ||||||
|  |             result = [x + [y] for x in result for y in pool] | ||||||
|  |         for prod in result: | ||||||
|  |             yield tuple(prod) | ||||||
|  |     reduce = reduce | ||||||
|  | else: | ||||||
|  |     from itertools import product | ||||||
|  |     from functools import reduce | ||||||
|  |  | ||||||
|  |  | ||||||
|  | # For use with Python 2.5 | ||||||
|  | # converts all keys from unicode to str for d and all nested dictionaries | ||||||
|  | def to_str_keys_recursive(d): | ||||||
|  |     if isinstance(d, list): | ||||||
|  |         for val in d: | ||||||
|  |             if isinstance(val, (dict, list)): | ||||||
|  |                 to_str_keys_recursive(val) | ||||||
|  |     elif isinstance(d, dict): | ||||||
|  |         for key, val in d.items(): | ||||||
|  |             if isinstance(val, (dict, list)): | ||||||
|  |                 to_str_keys_recursive(val) | ||||||
|  |             if isinstance(key, unicode): | ||||||
|  |                 d[str(key)] = d.pop(key) | ||||||
|  |     else: | ||||||
|  |         raise ValueError("non list/dict parameter not allowed") | ||||||
| @@ -4,6 +4,11 @@ import copy | |||||||
| import itertools | import itertools | ||||||
| import operator | import operator | ||||||
|  |  | ||||||
|  | from collections import defaultdict | ||||||
|  | from functools import partial | ||||||
|  |  | ||||||
|  | from mongoengine.python_support import product, reduce | ||||||
|  |  | ||||||
| import pymongo | import pymongo | ||||||
| from bson.code import Code | from bson.code import Code | ||||||
|  |  | ||||||
| @@ -40,6 +45,10 @@ class OperationError(Exception): | |||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class NotUniqueError(OperationError): | ||||||
|  |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| RE_TYPE = type(re.compile('')) | RE_TYPE = type(re.compile('')) | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -118,7 +127,7 @@ class QueryTreeTransformerVisitor(QNodeVisitor): | |||||||
|             # the necessary parts. Then for each $or part, create a new query |             # the necessary parts. Then for each $or part, create a new query | ||||||
|             # that ANDs the necessary part with the $or part. |             # that ANDs the necessary part with the $or part. | ||||||
|             clauses = [] |             clauses = [] | ||||||
|             for or_group in itertools.product(*or_groups): |             for or_group in product(*or_groups): | ||||||
|                 q_object = reduce(lambda a, b: a & b, and_parts, Q()) |                 q_object = reduce(lambda a, b: a & b, and_parts, Q()) | ||||||
|                 q_object = reduce(lambda a, b: a & b, or_group, q_object) |                 q_object = reduce(lambda a, b: a & b, or_group, q_object) | ||||||
|                 clauses.append(q_object) |                 clauses.append(q_object) | ||||||
| @@ -209,7 +218,7 @@ class QNode(object): | |||||||
|     def _combine(self, other, operation): |     def _combine(self, other, operation): | ||||||
|         """Combine this node with another node into a QCombination object. |         """Combine this node with another node into a QCombination object. | ||||||
|         """ |         """ | ||||||
|         if other.empty: |         if getattr(other, 'empty', True): | ||||||
|             return self |             return self | ||||||
|  |  | ||||||
|         if self.empty: |         if self.empty: | ||||||
| @@ -327,6 +336,7 @@ class QuerySet(object): | |||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     __already_indexed = set() |     __already_indexed = set() | ||||||
|  |     __dereference = False | ||||||
|  |  | ||||||
|     def __init__(self, document, collection): |     def __init__(self, document, collection): | ||||||
|         self._document = document |         self._document = document | ||||||
| @@ -346,7 +356,7 @@ class QuerySet(object): | |||||||
|  |  | ||||||
|         # If inheritance is allowed, only return instances and instances of |         # If inheritance is allowed, only return instances and instances of | ||||||
|         # subclasses of the class being used |         # subclasses of the class being used | ||||||
|         if document._meta.get('allow_inheritance'): |         if document._meta.get('allow_inheritance') != False: | ||||||
|             self._initial_query = {'_types': self._document._class_name} |             self._initial_query = {'_types': self._document._class_name} | ||||||
|             self._loaded_fields = QueryFieldList(always_include=['_cls']) |             self._loaded_fields = QueryFieldList(always_include=['_cls']) | ||||||
|         self._cursor_obj = None |         self._cursor_obj = None | ||||||
| @@ -388,12 +398,13 @@ class QuerySet(object): | |||||||
|             or a **-** to determine the index ordering |             or a **-** to determine the index ordering | ||||||
|         """ |         """ | ||||||
|         index_spec = QuerySet._build_index_spec(self._document, key_or_list) |         index_spec = QuerySet._build_index_spec(self._document, key_or_list) | ||||||
|         self._collection.ensure_index( |         index_spec = index_spec.copy() | ||||||
|             index_spec['fields'], |         fields = index_spec.pop('fields') | ||||||
|             drop_dups=drop_dups, |         index_spec['drop_dups'] = drop_dups | ||||||
|             background=background, |         index_spec['background'] = background | ||||||
|             sparse=index_spec.get('sparse', False), |         index_spec.update(kwargs) | ||||||
|             unique=index_spec.get('unique', False)) |  | ||||||
|  |         self._collection.ensure_index(fields, **index_spec) | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query): |     def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query): | ||||||
| @@ -436,7 +447,7 @@ class QuerySet(object): | |||||||
|         """ |         """ | ||||||
|         background = self._document._meta.get('index_background', False) |         background = self._document._meta.get('index_background', False) | ||||||
|         drop_dups = self._document._meta.get('index_drop_dups', False) |         drop_dups = self._document._meta.get('index_drop_dups', False) | ||||||
|         index_opts = self._document._meta.get('index_opts', {}) |         index_opts = self._document._meta.get('index_opts') or {} | ||||||
|         index_types = self._document._meta.get('index_types', True) |         index_types = self._document._meta.get('index_types', True) | ||||||
|  |  | ||||||
|         # determine if an index which we are creating includes |         # determine if an index which we are creating includes | ||||||
| @@ -444,6 +455,7 @@ class QuerySet(object): | |||||||
|         # an extra index on _type, as mongodb will use the existing |         # an extra index on _type, as mongodb will use the existing | ||||||
|         # index to service queries against _type |         # index to service queries against _type | ||||||
|         types_indexed = False |         types_indexed = False | ||||||
|  |  | ||||||
|         def includes_types(fields): |         def includes_types(fields): | ||||||
|             first_field = None |             first_field = None | ||||||
|             if len(fields): |             if len(fields): | ||||||
| @@ -460,13 +472,15 @@ class QuerySet(object): | |||||||
|                 background=background, drop_dups=drop_dups, **index_opts) |                 background=background, drop_dups=drop_dups, **index_opts) | ||||||
|  |  | ||||||
|         # Ensure document-defined indexes are created |         # Ensure document-defined indexes are created | ||||||
|         if self._document._meta['indexes']: |         if self._document._meta['index_specs']: | ||||||
|             for spec in self._document._meta['indexes']: |             index_spec = self._document._meta['index_specs'] | ||||||
|                 types_indexed = types_indexed or includes_types(spec['fields']) |             for spec in index_spec: | ||||||
|  |                 spec = spec.copy() | ||||||
|  |                 fields = spec.pop('fields') | ||||||
|  |                 types_indexed = types_indexed or includes_types(fields) | ||||||
|                 opts = index_opts.copy() |                 opts = index_opts.copy() | ||||||
|                 opts['unique'] = spec.get('unique', False) |                 opts.update(spec) | ||||||
|                 opts['sparse'] = spec.get('sparse', False) |                 self._collection.ensure_index(fields, | ||||||
|                 self._collection.ensure_index(spec['fields'], |  | ||||||
|                     background=background, **opts) |                     background=background, **opts) | ||||||
|  |  | ||||||
|         # If _types is being used (for polymorphism), it needs an index, |         # If _types is being used (for polymorphism), it needs an index, | ||||||
| @@ -481,19 +495,30 @@ class QuerySet(object): | |||||||
|             self._collection.ensure_index(index_spec, |             self._collection.ensure_index(index_spec, | ||||||
|                 background=background, **index_opts) |                 background=background, **index_opts) | ||||||
|  |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def _build_index_spec(cls, doc_cls, spec): |     def _build_index_spec(cls, doc_cls, spec): | ||||||
|         """Build a PyMongo index spec from a MongoEngine index spec. |         """Build a PyMongo index spec from a MongoEngine index spec. | ||||||
|         """ |         """ | ||||||
|         if isinstance(spec, basestring): |         if isinstance(spec, basestring): | ||||||
|             spec = {'fields': [spec]} |             spec = {'fields': [spec]} | ||||||
|         if isinstance(spec, (list, tuple)): |         elif isinstance(spec, (list, tuple)): | ||||||
|             spec = {'fields': spec} |             spec = {'fields': list(spec)} | ||||||
|  |         elif isinstance(spec, dict): | ||||||
|  |             spec = dict(spec) | ||||||
|  |  | ||||||
|         index_list = [] |         index_list = [] | ||||||
|         use_types = doc_cls._meta.get('allow_inheritance', True) |         direction = None | ||||||
|  |  | ||||||
|  |         allow_inheritance = doc_cls._meta.get('allow_inheritance') != False | ||||||
|  |  | ||||||
|  |         # If sparse - dont include types | ||||||
|  |         use_types = allow_inheritance and not spec.get('sparse', False) | ||||||
|  |  | ||||||
|         for key in spec['fields']: |         for key in spec['fields']: | ||||||
|  |             # If inherited spec continue | ||||||
|  |             if isinstance(key, (list, tuple)): | ||||||
|  |                 continue | ||||||
|  |  | ||||||
|             # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from * |             # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from * | ||||||
|             direction = pymongo.ASCENDING |             direction = pymongo.ASCENDING | ||||||
|             if key.startswith("-"): |             if key.startswith("-"): | ||||||
| @@ -508,24 +533,23 @@ class QuerySet(object): | |||||||
|             parts = key.split('.') |             parts = key.split('.') | ||||||
|             if parts in (['pk'], ['id'], ['_id']): |             if parts in (['pk'], ['id'], ['_id']): | ||||||
|                 key = '_id' |                 key = '_id' | ||||||
|  |                 fields = [] | ||||||
|             else: |             else: | ||||||
|                 fields = QuerySet._lookup_field(doc_cls, parts) |                 fields = QuerySet._lookup_field(doc_cls, parts) | ||||||
|                 parts = [field if field == '_id' else field.db_field for field in fields] |                 parts = [field if field == '_id' else field.db_field | ||||||
|  |                          for field in fields] | ||||||
|                 key = '.'.join(parts) |                 key = '.'.join(parts) | ||||||
|             index_list.append((key, direction)) |             index_list.append((key, direction)) | ||||||
|  |  | ||||||
|             # If sparse - dont include types |  | ||||||
|             if spec.get('sparse', False): |  | ||||||
|                 use_types = False |  | ||||||
|  |  | ||||||
|             # Check if a list field is being used, don't use _types if it is |             # Check if a list field is being used, don't use _types if it is | ||||||
|             if use_types and not all(f._index_with_types for f in fields): |             if use_types and not all(f._index_with_types for f in fields): | ||||||
|                 use_types = False |                 use_types = False | ||||||
|  |  | ||||||
|         # If _types is being used, prepend it to every specified index |         # If _types is being used, prepend it to every specified index | ||||||
|         index_types = doc_cls._meta.get('index_types', True) |         index_types = doc_cls._meta.get('index_types', True) | ||||||
|         allow_inheritance = doc_cls._meta.get('allow_inheritance') |  | ||||||
|         if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D: |         if (spec.get('types', index_types) and use_types | ||||||
|  |             and direction is not pymongo.GEO2D): | ||||||
|             index_list.insert(0, ('_types', 1)) |             index_list.insert(0, ('_types', 1)) | ||||||
|  |  | ||||||
|         spec['fields'] = index_list |         spec['fields'] = index_list | ||||||
| @@ -598,7 +622,6 @@ class QuerySet(object): | |||||||
|  |  | ||||||
|             if self._hint != -1: |             if self._hint != -1: | ||||||
|                 self._cursor_obj.hint(self._hint) |                 self._cursor_obj.hint(self._hint) | ||||||
|  |  | ||||||
|         return self._cursor_obj |         return self._cursor_obj | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
| @@ -639,7 +662,7 @@ class QuerySet(object): | |||||||
|                 from mongoengine.fields import ReferenceField, GenericReferenceField |                 from mongoengine.fields import ReferenceField, GenericReferenceField | ||||||
|                 if isinstance(field, (ReferenceField, GenericReferenceField)): |                 if isinstance(field, (ReferenceField, GenericReferenceField)): | ||||||
|                     raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts)) |                     raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts)) | ||||||
|                 if getattr(field, 'field', None): |                 if hasattr(getattr(field, 'field', None), 'lookup_member'): | ||||||
|                     new_field = field.field.lookup_member(field_name) |                     new_field = field.field.lookup_member(field_name) | ||||||
|                 else: |                 else: | ||||||
|                    # Look up subfield on the previous field |                    # Look up subfield on the previous field | ||||||
| @@ -676,6 +699,7 @@ class QuerySet(object): | |||||||
|         custom_operators = ['match'] |         custom_operators = ['match'] | ||||||
|  |  | ||||||
|         mongo_query = {} |         mongo_query = {} | ||||||
|  |         merge_query = defaultdict(list) | ||||||
|         for key, value in query.items(): |         for key, value in query.items(): | ||||||
|             if key == "__raw__": |             if key == "__raw__": | ||||||
|                 mongo_query.update(value) |                 mongo_query.update(value) | ||||||
| @@ -702,7 +726,7 @@ class QuerySet(object): | |||||||
|                 cleaned_fields = [] |                 cleaned_fields = [] | ||||||
|                 for field in fields: |                 for field in fields: | ||||||
|                     append_field = True |                     append_field = True | ||||||
|                     if isinstance(field, str): |                     if isinstance(field, basestring): | ||||||
|                         parts.append(field) |                         parts.append(field) | ||||||
|                         append_field = False |                         append_field = False | ||||||
|                     else: |                     else: | ||||||
| @@ -763,8 +787,23 @@ class QuerySet(object): | |||||||
|             key = '.'.join(parts) |             key = '.'.join(parts) | ||||||
|             if op is None or key not in mongo_query: |             if op is None or key not in mongo_query: | ||||||
|                 mongo_query[key] = value |                 mongo_query[key] = value | ||||||
|             elif key in mongo_query and isinstance(mongo_query[key], dict): |             elif key in mongo_query: | ||||||
|                 mongo_query[key].update(value) |                 if key in mongo_query and isinstance(mongo_query[key], dict): | ||||||
|  |                     mongo_query[key].update(value) | ||||||
|  |                 else: | ||||||
|  |                     # Store for manually merging later | ||||||
|  |                     merge_query[key].append(value) | ||||||
|  |  | ||||||
|  |         # The queryset has been filter in such a way we must manually merge | ||||||
|  |         for k, v in merge_query.items(): | ||||||
|  |             merge_query[k].append(mongo_query[k]) | ||||||
|  |             del mongo_query[k] | ||||||
|  |             if isinstance(v, list): | ||||||
|  |                 value = [{k:val} for val in v] | ||||||
|  |                 if '$and' in mongo_query.keys(): | ||||||
|  |                     mongo_query['$and'].append(value) | ||||||
|  |                 else: | ||||||
|  |                     mongo_query['$and'] = value | ||||||
|  |  | ||||||
|         return mongo_query |         return mongo_query | ||||||
|  |  | ||||||
| @@ -804,19 +843,18 @@ class QuerySet(object): | |||||||
|         keyword argument called :attr:`defaults`. |         keyword argument called :attr:`defaults`. | ||||||
|  |  | ||||||
|         .. note:: This requires two separate operations and therefore a |         .. note:: This requires two separate operations and therefore a | ||||||
|         race condition exists.  Because there are no transactions in mongoDB |             race condition exists.  Because there are no transactions in mongoDB | ||||||
|         other approaches should be investigated, to ensure you don't |             other approaches should be investigated, to ensure you don't | ||||||
|         accidently duplicate data when using this method. |             accidently duplicate data when using this method. | ||||||
|  |  | ||||||
|         :param write_options: optional extra keyword arguments used if we |         :param write_options: optional extra keyword arguments used if we | ||||||
|             have to create a new document. |             have to create a new document. | ||||||
|             Passes any write_options onto :meth:`~mongoengine.Document.save` |             Passes any write_options onto :meth:`~mongoengine.Document.save` | ||||||
|  |  | ||||||
|         .. versionadded:: 0.3 |  | ||||||
|  |  | ||||||
|         :param auto_save: if the object is to be saved automatically if not found. |         :param auto_save: if the object is to be saved automatically if not found. | ||||||
|  |  | ||||||
|         .. versionadded:: 0.6 |         .. versionchanged:: 0.6 - added `auto_save` | ||||||
|  |         .. versionadded:: 0.3 | ||||||
|         """ |         """ | ||||||
|         defaults = query.get('defaults', {}) |         defaults = query.get('defaults', {}) | ||||||
|         if 'defaults' in query: |         if 'defaults' in query: | ||||||
| @@ -899,8 +937,11 @@ class QuerySet(object): | |||||||
|             ids = self._collection.insert(raw, **write_options) |             ids = self._collection.insert(raw, **write_options) | ||||||
|         except pymongo.errors.OperationFailure, err: |         except pymongo.errors.OperationFailure, err: | ||||||
|             message = 'Could not save document (%s)' |             message = 'Could not save document (%s)' | ||||||
|             if u'duplicate key' in unicode(err): |             if re.match('^E1100[01] duplicate key', unicode(err)): | ||||||
|  |                 # E11000 - duplicate key error index | ||||||
|  |                 # E11001 - duplicate key on update | ||||||
|                 message = u'Tried to save duplicate unique keys (%s)' |                 message = u'Tried to save duplicate unique keys (%s)' | ||||||
|  |                 raise NotUniqueError(message % unicode(err)) | ||||||
|             raise OperationError(message % unicode(err)) |             raise OperationError(message % unicode(err)) | ||||||
|  |  | ||||||
|         if not load_bulk: |         if not load_bulk: | ||||||
| @@ -1000,6 +1041,8 @@ class QuerySet(object): | |||||||
|                          :class:`~bson.code.Code` or string |                          :class:`~bson.code.Code` or string | ||||||
|         :param output: output collection name, if set to 'inline' will try to |         :param output: output collection name, if set to 'inline' will try to | ||||||
|                        use :class:`~pymongo.collection.Collection.inline_map_reduce` |                        use :class:`~pymongo.collection.Collection.inline_map_reduce` | ||||||
|  |                        This can also be a dictionary containing output options | ||||||
|  |                        see: http://docs.mongodb.org/manual/reference/commands/#mapReduce | ||||||
|         :param finalize_f: finalize function, an optional function that |         :param finalize_f: finalize function, an optional function that | ||||||
|                            performs any post-reduction processing. |                            performs any post-reduction processing. | ||||||
|         :param scope: values to insert into map/reduce global scope. Optional. |         :param scope: values to insert into map/reduce global scope. Optional. | ||||||
| @@ -1151,9 +1194,10 @@ class QuerySet(object): | |||||||
|  |  | ||||||
|         .. versionadded:: 0.4 |         .. versionadded:: 0.4 | ||||||
|         .. versionchanged:: 0.5 - Fixed handling references |         .. versionchanged:: 0.5 - Fixed handling references | ||||||
|  |         .. versionchanged:: 0.6 - Improved db_field refrence handling | ||||||
|         """ |         """ | ||||||
|         from dereference import DeReference |         return self._dereference(self._cursor.distinct(field), 1, | ||||||
|         return DeReference()(self._cursor.distinct(field), 1) |                                  name=field, instance=self._document) | ||||||
|  |  | ||||||
|     def only(self, *fields): |     def only(self, *fields): | ||||||
|         """Load only a subset of this document's fields. :: |         """Load only a subset of this document's fields. :: | ||||||
| @@ -1308,9 +1352,16 @@ class QuerySet(object): | |||||||
|         """ |         """ | ||||||
|         doc = self._document |         doc = self._document | ||||||
|  |  | ||||||
|  |         # Handle deletes where skips or limits have been applied | ||||||
|  |         if self._skip or self._limit: | ||||||
|  |             for doc in self: | ||||||
|  |                 doc.delete() | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         delete_rules = doc._meta.get('delete_rules') or {} | ||||||
|         # Check for DENY rules before actually deleting/nullifying any other |         # Check for DENY rules before actually deleting/nullifying any other | ||||||
|         # references |         # references | ||||||
|         for rule_entry in doc._meta['delete_rules']: |         for rule_entry in delete_rules: | ||||||
|             document_cls, field_name = rule_entry |             document_cls, field_name = rule_entry | ||||||
|             rule = doc._meta['delete_rules'][rule_entry] |             rule = doc._meta['delete_rules'][rule_entry] | ||||||
|             if rule == DENY and document_cls.objects(**{field_name + '__in': self}).count() > 0: |             if rule == DENY and document_cls.objects(**{field_name + '__in': self}).count() > 0: | ||||||
| @@ -1318,12 +1369,14 @@ class QuerySet(object): | |||||||
|                         (document_cls.__name__, field_name) |                         (document_cls.__name__, field_name) | ||||||
|                 raise OperationError(msg) |                 raise OperationError(msg) | ||||||
|  |  | ||||||
|         for rule_entry in doc._meta['delete_rules']: |         for rule_entry in delete_rules: | ||||||
|             document_cls, field_name = rule_entry |             document_cls, field_name = rule_entry | ||||||
|             rule = doc._meta['delete_rules'][rule_entry] |             rule = doc._meta['delete_rules'][rule_entry] | ||||||
|             if rule == CASCADE: |             if rule == CASCADE: | ||||||
|                 ref_q = document_cls.objects(**{field_name + '__in': self}) |                 ref_q = document_cls.objects(**{field_name + '__in': self}) | ||||||
|                 if doc != document_cls or (doc == document_cls and ref_q.count() > 0): |                 ref_q_count = ref_q.count() | ||||||
|  |                 if (doc != document_cls and ref_q_count > 0 | ||||||
|  |                     or (doc == document_cls and ref_q_count > 0)): | ||||||
|                     ref_q.delete(safe=safe) |                     ref_q.delete(safe=safe) | ||||||
|             elif rule == NULLIFY: |             elif rule == NULLIFY: | ||||||
|                 document_cls.objects(**{field_name + '__in': self}).update( |                 document_cls.objects(**{field_name + '__in': self}).update( | ||||||
| @@ -1342,6 +1395,8 @@ class QuerySet(object): | |||||||
|         """ |         """ | ||||||
|         operators = ['set', 'unset', 'inc', 'dec', 'pop', 'push', 'push_all', |         operators = ['set', 'unset', 'inc', 'dec', 'pop', 'push', 'push_all', | ||||||
|                      'pull', 'pull_all', 'add_to_set'] |                      'pull', 'pull_all', 'add_to_set'] | ||||||
|  |         match_operators = ['ne', 'gt', 'gte', 'lt', 'lte', 'in', 'nin', 'mod', | ||||||
|  |                            'all', 'size', 'exists', 'not'] | ||||||
|  |  | ||||||
|         mongo_update = {} |         mongo_update = {} | ||||||
|         for key, value in update.items(): |         for key, value in update.items(): | ||||||
| @@ -1365,6 +1420,10 @@ class QuerySet(object): | |||||||
|                 elif op == 'add_to_set': |                 elif op == 'add_to_set': | ||||||
|                     op = op.replace('_to_set', 'ToSet') |                     op = op.replace('_to_set', 'ToSet') | ||||||
|  |  | ||||||
|  |             match = None | ||||||
|  |             if parts[-1] in match_operators: | ||||||
|  |                 match = parts.pop() | ||||||
|  |  | ||||||
|             if _doc_cls: |             if _doc_cls: | ||||||
|                 # Switch field names to proper names [set in Field(name='foo')] |                 # Switch field names to proper names [set in Field(name='foo')] | ||||||
|                 fields = QuerySet._lookup_field(_doc_cls, parts) |                 fields = QuerySet._lookup_field(_doc_cls, parts) | ||||||
| @@ -1373,7 +1432,7 @@ class QuerySet(object): | |||||||
|                 cleaned_fields = [] |                 cleaned_fields = [] | ||||||
|                 for field in fields: |                 for field in fields: | ||||||
|                     append_field = True |                     append_field = True | ||||||
|                     if isinstance(field, str): |                     if isinstance(field, basestring): | ||||||
|                         # Convert the S operator to $ |                         # Convert the S operator to $ | ||||||
|                         if field == 'S': |                         if field == 'S': | ||||||
|                             field = '$' |                             field = '$' | ||||||
| @@ -1387,26 +1446,39 @@ class QuerySet(object): | |||||||
|                 # Convert value to proper value |                 # Convert value to proper value | ||||||
|                 field = cleaned_fields[-1] |                 field = cleaned_fields[-1] | ||||||
|  |  | ||||||
|                 if op in (None, 'set', 'push', 'pull', 'addToSet'): |                 if op in (None, 'set', 'push', 'pull'): | ||||||
|                     if field.required or value is not None: |                     if field.required or value is not None: | ||||||
|                         value = field.prepare_query_value(op, value) |                         value = field.prepare_query_value(op, value) | ||||||
|                 elif op in ('pushAll', 'pullAll'): |                 elif op in ('pushAll', 'pullAll'): | ||||||
|                     value = [field.prepare_query_value(op, v) for v in value] |                     value = [field.prepare_query_value(op, v) for v in value] | ||||||
|  |                 elif op == 'addToSet': | ||||||
|  |                     if isinstance(value, (list, tuple, set)): | ||||||
|  |                         value = [field.prepare_query_value(op, v) for v in value] | ||||||
|  |                     elif field.required or value is not None: | ||||||
|  |                         value = field.prepare_query_value(op, value) | ||||||
|  |  | ||||||
|  |             if match: | ||||||
|  |                 match = '$' + match | ||||||
|  |                 value = {match: value} | ||||||
|  |  | ||||||
|             key = '.'.join(parts) |             key = '.'.join(parts) | ||||||
|  |  | ||||||
|             if not op: |             if not op: | ||||||
|                 raise InvalidQueryError("Updates must supply an operation eg: set__FIELD=value") |                 raise InvalidQueryError("Updates must supply an operation " | ||||||
|  |                                         "eg: set__FIELD=value") | ||||||
|  |  | ||||||
|             if 'pull' in op and '.' in key: |             if 'pull' in op and '.' in key: | ||||||
|                 # Dot operators don't work on pull operations |                 # Dot operators don't work on pull operations | ||||||
|                 # it uses nested dict syntax |                 # it uses nested dict syntax | ||||||
|                 if op == 'pullAll': |                 if op == 'pullAll': | ||||||
|                     raise InvalidQueryError("pullAll operations only support a single field depth") |                     raise InvalidQueryError("pullAll operations only support " | ||||||
|  |                                             "a single field depth") | ||||||
|  |  | ||||||
|                 parts.reverse() |                 parts.reverse() | ||||||
|                 for key in parts: |                 for key in parts: | ||||||
|                     value = {key: value} |                     value = {key: value} | ||||||
|  |             elif op == 'addToSet' and isinstance(value, list): | ||||||
|  |                 value = {key: {"$each": value}} | ||||||
|             else: |             else: | ||||||
|                 value = {key: value} |                 value = {key: value} | ||||||
|             key = '$' + op |             key = '$' + op | ||||||
| @@ -1710,10 +1782,11 @@ class QuerySet(object): | |||||||
|     def _item_frequencies_map_reduce(self, field, normalize=False): |     def _item_frequencies_map_reduce(self, field, normalize=False): | ||||||
|         map_func = """ |         map_func = """ | ||||||
|             function() { |             function() { | ||||||
|                 path = '{{~%(field)s}}'.split('.'); |                 var path = '{{~%(field)s}}'.split('.'); | ||||||
|                 field = this; |                 var field = this; | ||||||
|  |  | ||||||
|                 for (p in path) { |                 for (p in path) { | ||||||
|                     if (field) |                     if (typeof field != 'undefined') | ||||||
|                        field = field[path[p]]; |                        field = field[path[p]]; | ||||||
|                     else |                     else | ||||||
|                        break; |                        break; | ||||||
| @@ -1722,7 +1795,7 @@ class QuerySet(object): | |||||||
|                     field.forEach(function(item) { |                     field.forEach(function(item) { | ||||||
|                         emit(item, 1); |                         emit(item, 1); | ||||||
|                     }); |                     }); | ||||||
|                 } else if (field) { |                 } else if (typeof field != 'undefined') { | ||||||
|                     emit(field, 1); |                     emit(field, 1); | ||||||
|                 } else { |                 } else { | ||||||
|                     emit(null, 1); |                     emit(null, 1); | ||||||
| @@ -1746,12 +1819,12 @@ class QuerySet(object): | |||||||
|             if isinstance(key, float): |             if isinstance(key, float): | ||||||
|                 if int(key) == key: |                 if int(key) == key: | ||||||
|                     key = int(key) |                     key = int(key) | ||||||
|                 key = str(key) |             frequencies[key] = int(f.value) | ||||||
|             frequencies[key] = f.value |  | ||||||
|  |  | ||||||
|         if normalize: |         if normalize: | ||||||
|             count = sum(frequencies.values()) |             count = sum(frequencies.values()) | ||||||
|             frequencies = dict([(k, v / count) for k, v in frequencies.items()]) |             frequencies = dict([(k, float(v) / count) | ||||||
|  |                                 for k, v in frequencies.items()]) | ||||||
|  |  | ||||||
|         return frequencies |         return frequencies | ||||||
|  |  | ||||||
| @@ -1759,31 +1832,28 @@ class QuerySet(object): | |||||||
|         """Uses exec_js to execute""" |         """Uses exec_js to execute""" | ||||||
|         freq_func = """ |         freq_func = """ | ||||||
|             function(path) { |             function(path) { | ||||||
|                 path = path.split('.'); |                 var path = path.split('.'); | ||||||
|  |  | ||||||
|                 if (options.normalize) { |                 var total = 0.0; | ||||||
|                     var total = 0.0; |                 db[collection].find(query).forEach(function(doc) { | ||||||
|                     db[collection].find(query).forEach(function(doc) { |                     var field = doc; | ||||||
|                         field = doc; |                     for (p in path) { | ||||||
|                         for (p in path) { |                         if (field) | ||||||
|                             if (field) |                             field = field[path[p]]; | ||||||
|                                 field = field[path[p]]; |                          else | ||||||
|                             else |                             break; | ||||||
|                                 break; |                     } | ||||||
|                         } |                     if (field && field.constructor == Array) { | ||||||
|                         if (field && field.constructor == Array) { |                        total += field.length; | ||||||
|                             total += field.length; |                     } else { | ||||||
|                         } else { |                        total++; | ||||||
|                             total++; |                     } | ||||||
|                         } |                 }); | ||||||
|                     }); |  | ||||||
|                 } |  | ||||||
|  |  | ||||||
|                 var frequencies = {}; |                 var frequencies = {}; | ||||||
|  |                 var types = {}; | ||||||
|                 var inc = 1.0; |                 var inc = 1.0; | ||||||
|                 if (options.normalize) { |  | ||||||
|                     inc /= total; |  | ||||||
|                 } |  | ||||||
|                 db[collection].find(query).forEach(function(doc) { |                 db[collection].find(query).forEach(function(doc) { | ||||||
|                     field = doc; |                     field = doc; | ||||||
|                     for (p in path) { |                     for (p in path) { | ||||||
| @@ -1798,17 +1868,28 @@ class QuerySet(object): | |||||||
|                         }); |                         }); | ||||||
|                     } else { |                     } else { | ||||||
|                         var item = field; |                         var item = field; | ||||||
|  |                         types[item] = item; | ||||||
|                         frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); |                         frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); | ||||||
|                     } |                     } | ||||||
|                 }); |                 }); | ||||||
|                 return frequencies; |                 return [total, frequencies, types]; | ||||||
|             } |             } | ||||||
|         """ |         """ | ||||||
|         data = self.exec_js(freq_func, field, normalize=normalize) |         total, data, types = self.exec_js(freq_func, field) | ||||||
|         if 'undefined' in data: |         values = dict([(types.get(k), int(v)) for k, v in data.iteritems()]) | ||||||
|             data[None] = data['undefined'] |  | ||||||
|             del(data['undefined']) |         if normalize: | ||||||
|         return data |             values = dict([(k, float(v) / total) for k, v in values.items()]) | ||||||
|  |  | ||||||
|  |         frequencies = {} | ||||||
|  |         for k, v in values.iteritems(): | ||||||
|  |             if isinstance(k, float): | ||||||
|  |                 if int(k) == k: | ||||||
|  |                     k = int(k) | ||||||
|  |  | ||||||
|  |             frequencies[k] = v | ||||||
|  |  | ||||||
|  |         return frequencies | ||||||
|  |  | ||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
|         """Provides the string representation of the QuerySet |         """Provides the string representation of the QuerySet | ||||||
| @@ -1837,13 +1918,30 @@ class QuerySet(object): | |||||||
|  |  | ||||||
|         .. versionadded:: 0.5 |         .. versionadded:: 0.5 | ||||||
|         """ |         """ | ||||||
|         from dereference import DeReference |  | ||||||
|         # Make select related work the same for querysets |         # Make select related work the same for querysets | ||||||
|         max_depth += 1 |         max_depth += 1 | ||||||
|         return DeReference()(self, max_depth=max_depth) |         return self._dereference(self, max_depth=max_depth) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def _dereference(self): | ||||||
|  |         if not self.__dereference: | ||||||
|  |             from dereference import DeReference | ||||||
|  |             self.__dereference = DeReference()  # Cached | ||||||
|  |         return self.__dereference | ||||||
|  |  | ||||||
|  |  | ||||||
| class QuerySetManager(object): | class QuerySetManager(object): | ||||||
|  |     """ | ||||||
|  |     The default QuerySet Manager. | ||||||
|  |  | ||||||
|  |     Custom QuerySet Manager functions can extend this class and users can | ||||||
|  |     add extra queryset functionality.  Any custom manager methods must accept a | ||||||
|  |     :class:`~mongoengine.Document` class as its first argument, and a | ||||||
|  |     :class:`~mongoengine.queryset.QuerySet` as its second argument. | ||||||
|  |  | ||||||
|  |     The method function should return a :class:`~mongoengine.queryset.QuerySet` | ||||||
|  |     , probably the same one that was passed in, but modified in some way. | ||||||
|  |     """ | ||||||
|  |  | ||||||
|     get_queryset = None |     get_queryset = None | ||||||
|  |  | ||||||
| @@ -1861,13 +1959,16 @@ class QuerySetManager(object): | |||||||
|             return self |             return self | ||||||
|  |  | ||||||
|         # owner is the document that contains the QuerySetManager |         # owner is the document that contains the QuerySetManager | ||||||
|         queryset_class = owner._meta['queryset_class'] or QuerySet |         queryset_class = owner._meta.get('queryset_class') or QuerySet | ||||||
|         queryset = queryset_class(owner, owner._get_collection()) |         queryset = queryset_class(owner, owner._get_collection()) | ||||||
|         if self.get_queryset: |         if self.get_queryset: | ||||||
|             if self.get_queryset.func_code.co_argcount == 1: |             arg_count = self.get_queryset.func_code.co_argcount | ||||||
|  |             if arg_count == 1: | ||||||
|                 queryset = self.get_queryset(queryset) |                 queryset = self.get_queryset(queryset) | ||||||
|             else: |             elif arg_count == 2: | ||||||
|                 queryset = self.get_queryset(owner, queryset) |                 queryset = self.get_queryset(owner, queryset) | ||||||
|  |             else: | ||||||
|  |                 queryset = partial(self.get_queryset, owner, queryset) | ||||||
|         return queryset |         return queryset | ||||||
|  |  | ||||||
|  |  | ||||||
|   | |||||||
| @@ -5,7 +5,7 @@ | |||||||
| %define srcname mongoengine | %define srcname mongoengine | ||||||
|  |  | ||||||
| Name:           python-%{srcname} | Name:           python-%{srcname} | ||||||
| Version:        0.6.13 | Version:        0.7.5 | ||||||
| Release:        1%{?dist} | Release:        1%{?dist} | ||||||
| Summary:        A Python Document-Object Mapper for working with MongoDB | Summary:        A Python Document-Object Mapper for working with MongoDB | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										16
									
								
								setup.cfg
									
									
									
									
									
								
							
							
						
						
									
										16
									
								
								setup.cfg
									
									
									
									
									
								
							| @@ -1,13 +1,11 @@ | |||||||
| [aliases] |  | ||||||
| test = nosetests |  | ||||||
|  |  | ||||||
| [nosetests] | [nosetests] | ||||||
| verbosity = 2 | verbosity = 3 | ||||||
| detailed-errors = 1 | detailed-errors = 1 | ||||||
| #with-coverage = 1 | #with-coverage = 1 | ||||||
| cover-html = 1 | #cover-erase = 1 | ||||||
| cover-html-dir = ../htmlcov | #cover-html = 1 | ||||||
| cover-package = mongoengine | #cover-html-dir = ../htmlcov | ||||||
| cover-erase = 1 | #cover-package = mongoengine | ||||||
|  | py3where = build | ||||||
| where = tests | where = tests | ||||||
| #tests = test_bugfix.py | #tests =  test_bugfix.py | ||||||
							
								
								
									
										49
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										49
									
								
								setup.py
									
									
									
									
									
								
							| @@ -1,27 +1,35 @@ | |||||||
| from setuptools import setup, find_packages |  | ||||||
| import os | import os | ||||||
|  | import sys | ||||||
|  | from setuptools import setup, find_packages | ||||||
|  |  | ||||||
| DESCRIPTION = "A Python Document-Object Mapper for working with MongoDB" | # Hack to silence atexit traceback in newer python versions | ||||||
|  | try: | ||||||
|  |     import multiprocessing | ||||||
|  | except ImportError: | ||||||
|  |     pass | ||||||
|  |  | ||||||
|  | DESCRIPTION = """MongoEngine is a Python Object-Document | ||||||
|  | Mapper for working with MongoDB.""" | ||||||
| LONG_DESCRIPTION = None | LONG_DESCRIPTION = None | ||||||
| try: | try: | ||||||
|     LONG_DESCRIPTION = open('README.rst').read() |     LONG_DESCRIPTION = open('README.rst').read() | ||||||
| except: | except: | ||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_version(version_tuple): | def get_version(version_tuple): | ||||||
|     version = '%s.%s' % (version_tuple[0], version_tuple[1]) |     if not isinstance(version_tuple[-1], int): | ||||||
|     if version_tuple[2]: |         return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1] | ||||||
|         version = '%s.%s' % (version, version_tuple[2]) |     return '.'.join(map(str, version_tuple)) | ||||||
|     return version |  | ||||||
|  |  | ||||||
| # Dirty hack to get version number from monogengine/__init__.py - we can't | # Dirty hack to get version number from monogengine/__init__.py - we can't | ||||||
| # import it as it depends on PyMongo and PyMongo isn't installed until this | # import it as it depends on PyMongo and PyMongo isn't installed until this | ||||||
| # file is read | # file is read | ||||||
| init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') | init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') | ||||||
| version_line = filter(lambda l: l.startswith('VERSION'), open(init))[0] | version_line = list(filter(lambda l: l.startswith('VERSION'), open(init)))[0] | ||||||
|  |  | ||||||
| VERSION = get_version(eval(version_line.split('=')[-1])) | VERSION = get_version(eval(version_line.split('=')[-1])) | ||||||
| print VERSION | print(VERSION) | ||||||
|  |  | ||||||
| CLASSIFIERS = [ | CLASSIFIERS = [ | ||||||
|     'Development Status :: 4 - Beta', |     'Development Status :: 4 - Beta', | ||||||
| @@ -29,18 +37,38 @@ CLASSIFIERS = [ | |||||||
|     'License :: OSI Approved :: MIT License', |     'License :: OSI Approved :: MIT License', | ||||||
|     'Operating System :: OS Independent', |     'Operating System :: OS Independent', | ||||||
|     'Programming Language :: Python', |     'Programming Language :: Python', | ||||||
|  |     "Programming Language :: Python :: 2", | ||||||
|  |     "Programming Language :: Python :: 2.5", | ||||||
|  |     "Programming Language :: Python :: 2.6", | ||||||
|  |     "Programming Language :: Python :: 2.7", | ||||||
|  |     "Programming Language :: Python :: 3", | ||||||
|  |     "Programming Language :: Python :: 3.1", | ||||||
|  |     "Programming Language :: Python :: 3.2", | ||||||
|  |     "Programming Language :: Python :: Implementation :: CPython", | ||||||
|     'Topic :: Database', |     'Topic :: Database', | ||||||
|     'Topic :: Software Development :: Libraries :: Python Modules', |     'Topic :: Software Development :: Libraries :: Python Modules', | ||||||
| ] | ] | ||||||
|  |  | ||||||
|  | extra_opts = {} | ||||||
|  | if sys.version_info[0] == 3: | ||||||
|  |     extra_opts['use_2to3'] = True | ||||||
|  |     extra_opts['tests_require'] = ['nose', 'coverage', 'blinker'] | ||||||
|  |     extra_opts['packages'] = find_packages(exclude=('tests',)) | ||||||
|  |     if "test" in sys.argv or "nosetests" in sys.argv: | ||||||
|  |         extra_opts['packages'].append("tests") | ||||||
|  |         extra_opts['package_data'] = {"tests": ["mongoengine.png"]} | ||||||
|  | else: | ||||||
|  |     extra_opts['tests_require'] = ['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL'] | ||||||
|  |     extra_opts['packages'] = find_packages(exclude=('tests',)) | ||||||
|  |  | ||||||
| setup(name='mongoengine', | setup(name='mongoengine', | ||||||
|       version=VERSION, |       version=VERSION, | ||||||
|       packages=find_packages(), |  | ||||||
|       author='Harry Marr', |       author='Harry Marr', | ||||||
|       author_email='harry.marr@{nospam}gmail.com', |       author_email='harry.marr@{nospam}gmail.com', | ||||||
|       maintainer="Ross Lawley", |       maintainer="Ross Lawley", | ||||||
|       maintainer_email="ross.lawley@{nospam}gmail.com", |       maintainer_email="ross.lawley@{nospam}gmail.com", | ||||||
|       url='http://mongoengine.org/', |       url='http://mongoengine.org/', | ||||||
|  |       download_url='https://github.com/MongoEngine/mongoengine/tarball/master', | ||||||
|       license='MIT', |       license='MIT', | ||||||
|       include_package_data=True, |       include_package_data=True, | ||||||
|       description=DESCRIPTION, |       description=DESCRIPTION, | ||||||
| @@ -48,5 +76,6 @@ setup(name='mongoengine', | |||||||
|       platforms=['any'], |       platforms=['any'], | ||||||
|       classifiers=CLASSIFIERS, |       classifiers=CLASSIFIERS, | ||||||
|       install_requires=['pymongo'], |       install_requires=['pymongo'], | ||||||
|       tests_require=['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL'] |       test_suite='nose.collector', | ||||||
|  |       **extra_opts | ||||||
| ) | ) | ||||||
|   | |||||||
							
								
								
									
										96
									
								
								tests/test_all_warnings.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										96
									
								
								tests/test_all_warnings.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,96 @@ | |||||||
|  | import unittest | ||||||
|  | import warnings | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.tests import query_counter | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class TestWarnings(unittest.TestCase): | ||||||
|  |  | ||||||
|  |     def setUp(self): | ||||||
|  |         conn = connect(db='mongoenginetest') | ||||||
|  |         self.warning_list = [] | ||||||
|  |         self.showwarning_default = warnings.showwarning | ||||||
|  |         warnings.showwarning = self.append_to_warning_list | ||||||
|  |  | ||||||
|  |     def append_to_warning_list(self, message, category, *args): | ||||||
|  |         self.warning_list.append({"message": message, | ||||||
|  |                                   "category": category}) | ||||||
|  |  | ||||||
|  |     def tearDown(self): | ||||||
|  |         # restore default handling of warnings | ||||||
|  |         warnings.showwarning = self.showwarning_default | ||||||
|  |  | ||||||
|  |     def test_allow_inheritance_future_warning(self): | ||||||
|  |         """Add FutureWarning for future allow_inhertiance default change. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         class SimpleBase(Document): | ||||||
|  |             a = IntField() | ||||||
|  |  | ||||||
|  |         class InheritedClass(SimpleBase): | ||||||
|  |             b = IntField() | ||||||
|  |  | ||||||
|  |         InheritedClass() | ||||||
|  |         self.assertEqual(len(self.warning_list), 1) | ||||||
|  |         warning = self.warning_list[0] | ||||||
|  |         self.assertEqual(FutureWarning, warning["category"]) | ||||||
|  |         self.assertTrue("InheritedClass" in str(warning["message"])) | ||||||
|  |  | ||||||
|  |     def test_dbref_reference_field_future_warning(self): | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             parent = ReferenceField('self') | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         p1 = Person() | ||||||
|  |         p1.parent = None | ||||||
|  |         p1.save() | ||||||
|  |  | ||||||
|  |         p2 = Person(name="Wilson Jr") | ||||||
|  |         p2.parent = p1 | ||||||
|  |         p2.save(cascade=False) | ||||||
|  |  | ||||||
|  |         self.assertEqual(len(self.warning_list), 1) | ||||||
|  |         warning = self.warning_list[0] | ||||||
|  |         self.assertEqual(FutureWarning, warning["category"]) | ||||||
|  |         self.assertTrue("ReferenceFields will default to using ObjectId" | ||||||
|  |                         in str(warning["message"])) | ||||||
|  |  | ||||||
|  |     def test_document_save_cascade_future_warning(self): | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             parent = ReferenceField('self') | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         p1 = Person(name="Wilson Snr") | ||||||
|  |         p1.parent = None | ||||||
|  |         p1.save() | ||||||
|  |  | ||||||
|  |         p2 = Person(name="Wilson Jr") | ||||||
|  |         p2.parent = p1 | ||||||
|  |         p2.parent.name = "Poppa Wilson" | ||||||
|  |         p2.save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(len(self.warning_list), 1) | ||||||
|  |         warning = self.warning_list[0] | ||||||
|  |         self.assertEqual(FutureWarning, warning["category"]) | ||||||
|  |         self.assertTrue("Cascading saves will default to off in 0.8" | ||||||
|  |                         in str(warning["message"])) | ||||||
|  |  | ||||||
|  |     def test_document_collection_syntax_warning(self): | ||||||
|  |  | ||||||
|  |         class NonAbstractBase(Document): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class InheritedDocumentFailTest(NonAbstractBase): | ||||||
|  |             meta = {'collection': 'fail'} | ||||||
|  |  | ||||||
|  |         warning = self.warning_list[0] | ||||||
|  |         self.assertEqual(SyntaxWarning, warning["category"]) | ||||||
|  |         self.assertEqual('non_abstract_base', | ||||||
|  |                          InheritedDocumentFailTest._get_collection_name()) | ||||||
| @@ -1,5 +1,8 @@ | |||||||
|  | from __future__ import with_statement | ||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
|  | from bson import DBRef, ObjectId | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.connection import get_db | from mongoengine.connection import get_db | ||||||
| from mongoengine.tests import query_counter | from mongoengine.tests import query_counter | ||||||
| @@ -63,6 +66,131 @@ class FieldTest(unittest.TestCase): | |||||||
|         User.drop_collection() |         User.drop_collection() | ||||||
|         Group.drop_collection() |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_list_item_dereference_dref_false(self): | ||||||
|  |         """Ensure that DBRef items in ListFields are dereferenced. | ||||||
|  |         """ | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             members = ListField(ReferenceField(User, dbref=False)) | ||||||
|  |  | ||||||
|  |         User.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |         for i in xrange(1, 51): | ||||||
|  |             user = User(name='user %s' % i) | ||||||
|  |             user.save() | ||||||
|  |  | ||||||
|  |         group = Group(members=User.objects) | ||||||
|  |         group.save() | ||||||
|  |         group.reload()  # Confirm reload works | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first() | ||||||
|  |             self.assertEqual(q, 1) | ||||||
|  |  | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |         # Document select_related | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             group_obj = Group.objects.first().select_related() | ||||||
|  |  | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |             [m for m in group_obj.members] | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |         # Queryset select_related | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |             group_objs = Group.objects.select_related() | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |             for group_obj in group_objs: | ||||||
|  |                 [m for m in group_obj.members] | ||||||
|  |                 self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |         User.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_handle_old_style_references(self): | ||||||
|  |         """Ensure that DBRef items in ListFields are dereferenced. | ||||||
|  |         """ | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             members = ListField(ReferenceField(User, dbref=True)) | ||||||
|  |  | ||||||
|  |         User.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |         for i in xrange(1, 26): | ||||||
|  |             user = User(name='user %s' % i) | ||||||
|  |             user.save() | ||||||
|  |  | ||||||
|  |         group = Group(members=User.objects) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         group = Group._get_collection().find_one() | ||||||
|  |  | ||||||
|  |         # Update the model to change the reference | ||||||
|  |         class Group(Document): | ||||||
|  |             members = ListField(ReferenceField(User, dbref=False)) | ||||||
|  |  | ||||||
|  |         group = Group.objects.first() | ||||||
|  |         group.members.append(User(name="String!").save()) | ||||||
|  |         group.save() | ||||||
|  |  | ||||||
|  |         group = Group.objects.first() | ||||||
|  |         self.assertEqual(group.members[0].name, 'user 1') | ||||||
|  |         self.assertEqual(group.members[-1].name, 'String!') | ||||||
|  |  | ||||||
|  |     def test_migrate_references(self): | ||||||
|  |         """Example of migrating ReferenceField storage | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         # Create some sample data | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         class Group(Document): | ||||||
|  |             author = ReferenceField(User, dbref=True) | ||||||
|  |             members = ListField(ReferenceField(User, dbref=True)) | ||||||
|  |  | ||||||
|  |         User.drop_collection() | ||||||
|  |         Group.drop_collection() | ||||||
|  |  | ||||||
|  |         user = User(name="Ross").save() | ||||||
|  |         group = Group(author=user, members=[user]).save() | ||||||
|  |  | ||||||
|  |         raw_data = Group._get_collection().find_one() | ||||||
|  |         self.assertTrue(isinstance(raw_data['author'], DBRef)) | ||||||
|  |         self.assertTrue(isinstance(raw_data['members'][0], DBRef)) | ||||||
|  |  | ||||||
|  |         # Migrate the model definition | ||||||
|  |         class Group(Document): | ||||||
|  |             author = ReferenceField(User, dbref=False) | ||||||
|  |             members = ListField(ReferenceField(User, dbref=False)) | ||||||
|  |  | ||||||
|  |         # Migrate the data | ||||||
|  |         for g in Group.objects(): | ||||||
|  |             g.author = g.author | ||||||
|  |             g.members = g.members | ||||||
|  |             g.save() | ||||||
|  |  | ||||||
|  |         group = Group.objects.first() | ||||||
|  |         self.assertEqual(group.author, user) | ||||||
|  |         self.assertEqual(group.members, [user]) | ||||||
|  |  | ||||||
|  |         raw_data = Group._get_collection().find_one() | ||||||
|  |         self.assertTrue(isinstance(raw_data['author'], ObjectId)) | ||||||
|  |         self.assertTrue(isinstance(raw_data['members'][0], ObjectId)) | ||||||
|  |  | ||||||
|     def test_recursive_reference(self): |     def test_recursive_reference(self): | ||||||
|         """Ensure that ReferenceFields can reference their own documents. |         """Ensure that ReferenceFields can reference their own documents. | ||||||
|         """ |         """ | ||||||
| @@ -109,10 +237,10 @@ class FieldTest(unittest.TestCase): | |||||||
|             peter = Employee.objects.with_id(peter.id).select_related() |             peter = Employee.objects.with_id(peter.id).select_related() | ||||||
|             self.assertEqual(q, 2) |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|             self.assertEquals(peter.boss, bill) |             self.assertEqual(peter.boss, bill) | ||||||
|             self.assertEqual(q, 2) |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|             self.assertEquals(peter.friends, friends) |             self.assertEqual(peter.friends, friends) | ||||||
|             self.assertEqual(q, 2) |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|         # Queryset select_related |         # Queryset select_related | ||||||
| @@ -123,10 +251,10 @@ class FieldTest(unittest.TestCase): | |||||||
|             self.assertEqual(q, 2) |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|             for employee in employees: |             for employee in employees: | ||||||
|                 self.assertEquals(employee.boss, bill) |                 self.assertEqual(employee.boss, bill) | ||||||
|                 self.assertEqual(q, 2) |                 self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|                 self.assertEquals(employee.friends, friends) |                 self.assertEqual(employee.friends, friends) | ||||||
|                 self.assertEqual(q, 2) |                 self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|     def test_circular_reference(self): |     def test_circular_reference(self): | ||||||
| @@ -160,7 +288,7 @@ class FieldTest(unittest.TestCase): | |||||||
|         daughter.relations.append(self_rel) |         daughter.relations.append(self_rel) | ||||||
|         daughter.save() |         daughter.save() | ||||||
|  |  | ||||||
|         self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects()) |         self.assertEqual("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects()) | ||||||
|  |  | ||||||
|     def test_circular_reference_on_self(self): |     def test_circular_reference_on_self(self): | ||||||
|         """Ensure you can handle circular references |         """Ensure you can handle circular references | ||||||
| @@ -186,7 +314,7 @@ class FieldTest(unittest.TestCase): | |||||||
|         daughter.relations.append(daughter) |         daughter.relations.append(daughter) | ||||||
|         daughter.save() |         daughter.save() | ||||||
|  |  | ||||||
|         self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects()) |         self.assertEqual("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects()) | ||||||
|  |  | ||||||
|     def test_circular_tree_reference(self): |     def test_circular_tree_reference(self): | ||||||
|         """Ensure you can handle circular references with more than one level |         """Ensure you can handle circular references with more than one level | ||||||
| @@ -228,7 +356,7 @@ class FieldTest(unittest.TestCase): | |||||||
|         anna.other.name = "Anna's friends" |         anna.other.name = "Anna's friends" | ||||||
|         anna.save() |         anna.save() | ||||||
|  |  | ||||||
|         self.assertEquals( |         self.assertEqual( | ||||||
|             "[<Person: Paul>, <Person: Maria>, <Person: Julia>, <Person: Anna>]", |             "[<Person: Paul>, <Person: Maria>, <Person: Julia>, <Person: Anna>]", | ||||||
|             "%s" % Person.objects() |             "%s" % Person.objects() | ||||||
|         ) |         ) | ||||||
| @@ -781,8 +909,8 @@ class FieldTest(unittest.TestCase): | |||||||
|         root.save() |         root.save() | ||||||
|  |  | ||||||
|         root = root.reload() |         root = root.reload() | ||||||
|         self.assertEquals(root.children, [company]) |         self.assertEqual(root.children, [company]) | ||||||
|         self.assertEquals(company.parents, [root]) |         self.assertEqual(company.parents, [root]) | ||||||
|  |  | ||||||
|     def test_dict_in_dbref_instance(self): |     def test_dict_in_dbref_instance(self): | ||||||
|  |  | ||||||
| @@ -808,8 +936,8 @@ class FieldTest(unittest.TestCase): | |||||||
|         room_101.save() |         room_101.save() | ||||||
|  |  | ||||||
|         room = Room.objects.first().select_related() |         room = Room.objects.first().select_related() | ||||||
|         self.assertEquals(room.staffs_with_position[0]['staff'], sarah) |         self.assertEqual(room.staffs_with_position[0]['staff'], sarah) | ||||||
|         self.assertEquals(room.staffs_with_position[1]['staff'], bob) |         self.assertEqual(room.staffs_with_position[1]['staff'], bob) | ||||||
|  |  | ||||||
|     def test_document_reload_no_inheritance(self): |     def test_document_reload_no_inheritance(self): | ||||||
|         class Foo(Document): |         class Foo(Document): | ||||||
| @@ -839,5 +967,27 @@ class FieldTest(unittest.TestCase): | |||||||
|         foo.save() |         foo.save() | ||||||
|         foo.reload() |         foo.reload() | ||||||
|  |  | ||||||
|         self.assertEquals(type(foo.bar), Bar) |         self.assertEqual(type(foo.bar), Bar) | ||||||
|         self.assertEquals(type(foo.baz), Baz) |         self.assertEqual(type(foo.baz), Baz) | ||||||
|  |  | ||||||
|  |     def test_list_lookup_not_checked_in_map(self): | ||||||
|  |         """Ensure we dereference list data correctly | ||||||
|  |         """ | ||||||
|  |         class Comment(Document): | ||||||
|  |             id = IntField(primary_key=True) | ||||||
|  |             text = StringField() | ||||||
|  |  | ||||||
|  |         class Message(Document): | ||||||
|  |             id = IntField(primary_key=True) | ||||||
|  |             comments = ListField(ReferenceField(Comment)) | ||||||
|  |  | ||||||
|  |         Comment.drop_collection() | ||||||
|  |         Message.drop_collection() | ||||||
|  |  | ||||||
|  |         c1 = Comment(id=0, text='zero').save() | ||||||
|  |         c2 = Comment(id=1, text='one').save() | ||||||
|  |         Message(id=1, comments=[c1, c2]).save() | ||||||
|  |  | ||||||
|  |         msg = Message.objects.get(id=1) | ||||||
|  |         self.assertEqual(0, msg.comments[0].id) | ||||||
|  |         self.assertEqual(1, msg.comments[1].id) | ||||||
|   | |||||||
| @@ -1,24 +1,34 @@ | |||||||
| # -*- coding: utf-8 -*- | from __future__ import with_statement | ||||||
|  |  | ||||||
| import unittest | import unittest | ||||||
|  | from nose.plugins.skip import SkipTest | ||||||
|  | from mongoengine.python_support import PY3 | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.django.shortcuts import get_document_or_404 |  | ||||||
|  |  | ||||||
| from django.http import Http404 | try: | ||||||
| from django.template import Context, Template |     from mongoengine.django.shortcuts import get_document_or_404 | ||||||
| from django.conf import settings |  | ||||||
| from django.core.paginator import Paginator |  | ||||||
|  |  | ||||||
| settings.configure() |     from django.http import Http404 | ||||||
|  |     from django.template import Context, Template | ||||||
|  |     from django.conf import settings | ||||||
|  |     from django.core.paginator import Paginator | ||||||
|  |  | ||||||
| from django.contrib.sessions.tests import SessionTestsMixin |     settings.configure() | ||||||
| from mongoengine.django.sessions import SessionStore, MongoSession |  | ||||||
|  |     from django.contrib.sessions.tests import SessionTestsMixin | ||||||
|  |     from mongoengine.django.sessions import SessionStore, MongoSession | ||||||
|  | except Exception, err: | ||||||
|  |     if PY3: | ||||||
|  |         SessionTestsMixin = type  # dummy value so no error | ||||||
|  |         SessionStore = None  # dummy value so no error | ||||||
|  |     else: | ||||||
|  |         raise err | ||||||
|  |  | ||||||
|  |  | ||||||
| class QuerySetTest(unittest.TestCase): | class QuerySetTest(unittest.TestCase): | ||||||
|  |  | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
|  |         if PY3: | ||||||
|  |             raise SkipTest('django does not have Python 3 support') | ||||||
|         connect(db='mongoenginetest') |         connect(db='mongoenginetest') | ||||||
|  |  | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
| @@ -99,6 +109,8 @@ class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): | |||||||
|     backend = SessionStore |     backend = SessionStore | ||||||
|  |  | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
|  |         if PY3: | ||||||
|  |             raise SkipTest('django does not have Python 3 support') | ||||||
|         connect(db='mongoenginetest') |         connect(db='mongoenginetest') | ||||||
|         MongoSession.drop_collection() |         MongoSession.drop_collection() | ||||||
|         super(MongoDBSessionTest, self).setUp() |         super(MongoDBSessionTest, self).setUp() | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -25,14 +25,14 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         p.name = "James" |         p.name = "James" | ||||||
|         p.age = 34 |         p.age = 34 | ||||||
|  |  | ||||||
|         self.assertEquals(p.to_mongo(), |         self.assertEqual(p.to_mongo(), | ||||||
|             {"_types": ["Person"], "_cls": "Person", |             {"_types": ["Person"], "_cls": "Person", | ||||||
|              "name": "James", "age": 34} |              "name": "James", "age": 34} | ||||||
|         ) |         ) | ||||||
|  |  | ||||||
|         p.save() |         p.save() | ||||||
|  |  | ||||||
|         self.assertEquals(self.Person.objects.first().age, 34) |         self.assertEqual(self.Person.objects.first().age, 34) | ||||||
|  |  | ||||||
|         # Confirm no changes to self.Person |         # Confirm no changes to self.Person | ||||||
|         self.assertFalse(hasattr(self.Person, 'age')) |         self.assertFalse(hasattr(self.Person, 'age')) | ||||||
| @@ -40,11 +40,11 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|     def test_dynamic_document_delta(self): |     def test_dynamic_document_delta(self): | ||||||
|         """Ensures simple dynamic documents can delta correctly""" |         """Ensures simple dynamic documents can delta correctly""" | ||||||
|         p = self.Person(name="James", age=34) |         p = self.Person(name="James", age=34) | ||||||
|         self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {})) |         self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {})) | ||||||
|  |  | ||||||
|         p.doc = 123 |         p.doc = 123 | ||||||
|         del(p.doc) |         del(p.doc) | ||||||
|         self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1})) |         self.assertEqual(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1})) | ||||||
|  |  | ||||||
|     def test_change_scope_of_variable(self): |     def test_change_scope_of_variable(self): | ||||||
|         """Test changing the scope of a dynamic field has no adverse effects""" |         """Test changing the scope of a dynamic field has no adverse effects""" | ||||||
| @@ -58,7 +58,7 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         p.save() |         p.save() | ||||||
|  |  | ||||||
|         p = self.Person.objects.get() |         p = self.Person.objects.get() | ||||||
|         self.assertEquals(p.misc, {'hello': 'world'}) |         self.assertEqual(p.misc, {'hello': 'world'}) | ||||||
|  |  | ||||||
|     def test_delete_dynamic_field(self): |     def test_delete_dynamic_field(self): | ||||||
|         """Test deleting a dynamic field works""" |         """Test deleting a dynamic field works""" | ||||||
| @@ -73,10 +73,10 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         p.save() |         p.save() | ||||||
|  |  | ||||||
|         p = self.Person.objects.get() |         p = self.Person.objects.get() | ||||||
|         self.assertEquals(p.misc, {'hello': 'world'}) |         self.assertEqual(p.misc, {'hello': 'world'}) | ||||||
|         collection = self.db[self.Person._get_collection_name()] |         collection = self.db[self.Person._get_collection_name()] | ||||||
|         obj = collection.find_one() |         obj = collection.find_one() | ||||||
|         self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name']) |         self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name']) | ||||||
|  |  | ||||||
|         del(p.misc) |         del(p.misc) | ||||||
|         p.save() |         p.save() | ||||||
| @@ -85,7 +85,7 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         self.assertFalse(hasattr(p, 'misc')) |         self.assertFalse(hasattr(p, 'misc')) | ||||||
|  |  | ||||||
|         obj = collection.find_one() |         obj = collection.find_one() | ||||||
|         self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name']) |         self.assertEqual(sorted(obj.keys()), ['_cls', '_id', '_types', 'name']) | ||||||
|  |  | ||||||
|     def test_dynamic_document_queries(self): |     def test_dynamic_document_queries(self): | ||||||
|         """Ensure we can query dynamic fields""" |         """Ensure we can query dynamic fields""" | ||||||
| @@ -94,10 +94,10 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         p.age = 22 |         p.age = 22 | ||||||
|         p.save() |         p.save() | ||||||
|  |  | ||||||
|         self.assertEquals(1, self.Person.objects(age=22).count()) |         self.assertEqual(1, self.Person.objects(age=22).count()) | ||||||
|         p = self.Person.objects(age=22) |         p = self.Person.objects(age=22) | ||||||
|         p = p.get() |         p = p.get() | ||||||
|         self.assertEquals(22, p.age) |         self.assertEqual(22, p.age) | ||||||
|  |  | ||||||
|     def test_complex_dynamic_document_queries(self): |     def test_complex_dynamic_document_queries(self): | ||||||
|         class Person(DynamicDocument): |         class Person(DynamicDocument): | ||||||
| @@ -117,8 +117,8 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         p2.age = 10 |         p2.age = 10 | ||||||
|         p2.save() |         p2.save() | ||||||
|  |  | ||||||
|         self.assertEquals(Person.objects(age__icontains='ten').count(), 2) |         self.assertEqual(Person.objects(age__icontains='ten').count(), 2) | ||||||
|         self.assertEquals(Person.objects(age__gte=10).count(), 1) |         self.assertEqual(Person.objects(age__gte=10).count(), 1) | ||||||
|  |  | ||||||
|     def test_complex_data_lookups(self): |     def test_complex_data_lookups(self): | ||||||
|         """Ensure you can query dynamic document dynamic fields""" |         """Ensure you can query dynamic document dynamic fields""" | ||||||
| @@ -126,7 +126,7 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         p.misc = {'hello': 'world'} |         p.misc = {'hello': 'world'} | ||||||
|         p.save() |         p.save() | ||||||
|  |  | ||||||
|         self.assertEquals(1, self.Person.objects(misc__hello='world').count()) |         self.assertEqual(1, self.Person.objects(misc__hello='world').count()) | ||||||
|  |  | ||||||
|     def test_inheritance(self): |     def test_inheritance(self): | ||||||
|         """Ensure that dynamic document plays nice with inheritance""" |         """Ensure that dynamic document plays nice with inheritance""" | ||||||
| @@ -146,8 +146,8 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         joe_bloggs.age = 20 |         joe_bloggs.age = 20 | ||||||
|         joe_bloggs.save() |         joe_bloggs.save() | ||||||
|  |  | ||||||
|         self.assertEquals(1, self.Person.objects(age=20).count()) |         self.assertEqual(1, self.Person.objects(age=20).count()) | ||||||
|         self.assertEquals(1, Employee.objects(age=20).count()) |         self.assertEqual(1, Employee.objects(age=20).count()) | ||||||
|  |  | ||||||
|         joe_bloggs = self.Person.objects.first() |         joe_bloggs = self.Person.objects.first() | ||||||
|         self.assertTrue(isinstance(joe_bloggs, Employee)) |         self.assertTrue(isinstance(joe_bloggs, Employee)) | ||||||
| @@ -170,7 +170,7 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         embedded_1.list_field = ['1', 2, {'hello': 'world'}] |         embedded_1.list_field = ['1', 2, {'hello': 'world'}] | ||||||
|         doc.embedded_field = embedded_1 |         doc.embedded_field = embedded_1 | ||||||
|  |  | ||||||
|         self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", |         self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", | ||||||
|             "embedded_field": { |             "embedded_field": { | ||||||
|                 "_types": ['Embedded'], "_cls": "Embedded", |                 "_types": ['Embedded'], "_cls": "Embedded", | ||||||
|                 "string_field": "hello", |                 "string_field": "hello", | ||||||
| @@ -182,11 +182,11 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         doc.save() |         doc.save() | ||||||
|  |  | ||||||
|         doc = Doc.objects.first() |         doc = Doc.objects.first() | ||||||
|         self.assertEquals(doc.embedded_field.__class__, Embedded) |         self.assertEqual(doc.embedded_field.__class__, Embedded) | ||||||
|         self.assertEquals(doc.embedded_field.string_field, "hello") |         self.assertEqual(doc.embedded_field.string_field, "hello") | ||||||
|         self.assertEquals(doc.embedded_field.int_field, 1) |         self.assertEqual(doc.embedded_field.int_field, 1) | ||||||
|         self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'}) |         self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) | ||||||
|         self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}]) |         self.assertEqual(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}]) | ||||||
|  |  | ||||||
|     def test_complex_embedded_documents(self): |     def test_complex_embedded_documents(self): | ||||||
|         """Test complex dynamic embedded documents setups""" |         """Test complex dynamic embedded documents setups""" | ||||||
| @@ -213,7 +213,7 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         embedded_1.list_field = ['1', 2, embedded_2] |         embedded_1.list_field = ['1', 2, embedded_2] | ||||||
|         doc.embedded_field = embedded_1 |         doc.embedded_field = embedded_1 | ||||||
|  |  | ||||||
|         self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", |         self.assertEqual(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", | ||||||
|             "embedded_field": { |             "embedded_field": { | ||||||
|                 "_types": ['Embedded'], "_cls": "Embedded", |                 "_types": ['Embedded'], "_cls": "Embedded", | ||||||
|                 "string_field": "hello", |                 "string_field": "hello", | ||||||
| @@ -230,20 +230,20 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         }) |         }) | ||||||
|         doc.save() |         doc.save() | ||||||
|         doc = Doc.objects.first() |         doc = Doc.objects.first() | ||||||
|         self.assertEquals(doc.embedded_field.__class__, Embedded) |         self.assertEqual(doc.embedded_field.__class__, Embedded) | ||||||
|         self.assertEquals(doc.embedded_field.string_field, "hello") |         self.assertEqual(doc.embedded_field.string_field, "hello") | ||||||
|         self.assertEquals(doc.embedded_field.int_field, 1) |         self.assertEqual(doc.embedded_field.int_field, 1) | ||||||
|         self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'}) |         self.assertEqual(doc.embedded_field.dict_field, {'hello': 'world'}) | ||||||
|         self.assertEquals(doc.embedded_field.list_field[0], '1') |         self.assertEqual(doc.embedded_field.list_field[0], '1') | ||||||
|         self.assertEquals(doc.embedded_field.list_field[1], 2) |         self.assertEqual(doc.embedded_field.list_field[1], 2) | ||||||
|  |  | ||||||
|         embedded_field = doc.embedded_field.list_field[2] |         embedded_field = doc.embedded_field.list_field[2] | ||||||
|  |  | ||||||
|         self.assertEquals(embedded_field.__class__, Embedded) |         self.assertEqual(embedded_field.__class__, Embedded) | ||||||
|         self.assertEquals(embedded_field.string_field, "hello") |         self.assertEqual(embedded_field.string_field, "hello") | ||||||
|         self.assertEquals(embedded_field.int_field, 1) |         self.assertEqual(embedded_field.int_field, 1) | ||||||
|         self.assertEquals(embedded_field.dict_field, {'hello': 'world'}) |         self.assertEqual(embedded_field.dict_field, {'hello': 'world'}) | ||||||
|         self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}]) |         self.assertEqual(embedded_field.list_field, ['1', 2, {'hello': 'world'}]) | ||||||
|  |  | ||||||
|     def test_delta_for_dynamic_documents(self): |     def test_delta_for_dynamic_documents(self): | ||||||
|         p = self.Person() |         p = self.Person() | ||||||
| @@ -252,18 +252,18 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         p.save() |         p.save() | ||||||
|  |  | ||||||
|         p.age = 24 |         p.age = 24 | ||||||
|         self.assertEquals(p.age, 24) |         self.assertEqual(p.age, 24) | ||||||
|         self.assertEquals(p._get_changed_fields(), ['age']) |         self.assertEqual(p._get_changed_fields(), ['age']) | ||||||
|         self.assertEquals(p._delta(), ({'age': 24}, {})) |         self.assertEqual(p._delta(), ({'age': 24}, {})) | ||||||
|  |  | ||||||
|         p = self.Person.objects(age=22).get() |         p = self.Person.objects(age=22).get() | ||||||
|         p.age = 24 |         p.age = 24 | ||||||
|         self.assertEquals(p.age, 24) |         self.assertEqual(p.age, 24) | ||||||
|         self.assertEquals(p._get_changed_fields(), ['age']) |         self.assertEqual(p._get_changed_fields(), ['age']) | ||||||
|         self.assertEquals(p._delta(), ({'age': 24}, {})) |         self.assertEqual(p._delta(), ({'age': 24}, {})) | ||||||
|  |  | ||||||
|         p.save() |         p.save() | ||||||
|         self.assertEquals(1, self.Person.objects(age=24).count()) |         self.assertEqual(1, self.Person.objects(age=24).count()) | ||||||
|  |  | ||||||
|     def test_delta(self): |     def test_delta(self): | ||||||
|  |  | ||||||
| @@ -275,40 +275,40 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         doc.save() |         doc.save() | ||||||
|  |  | ||||||
|         doc = Doc.objects.first() |         doc = Doc.objects.first() | ||||||
|         self.assertEquals(doc._get_changed_fields(), []) |         self.assertEqual(doc._get_changed_fields(), []) | ||||||
|         self.assertEquals(doc._delta(), ({}, {})) |         self.assertEqual(doc._delta(), ({}, {})) | ||||||
|  |  | ||||||
|         doc.string_field = 'hello' |         doc.string_field = 'hello' | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['string_field']) |         self.assertEqual(doc._get_changed_fields(), ['string_field']) | ||||||
|         self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {})) |         self.assertEqual(doc._delta(), ({'string_field': 'hello'}, {})) | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |         doc._changed_fields = [] | ||||||
|         doc.int_field = 1 |         doc.int_field = 1 | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['int_field']) |         self.assertEqual(doc._get_changed_fields(), ['int_field']) | ||||||
|         self.assertEquals(doc._delta(), ({'int_field': 1}, {})) |         self.assertEqual(doc._delta(), ({'int_field': 1}, {})) | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |         doc._changed_fields = [] | ||||||
|         dict_value = {'hello': 'world', 'ping': 'pong'} |         dict_value = {'hello': 'world', 'ping': 'pong'} | ||||||
|         doc.dict_field = dict_value |         doc.dict_field = dict_value | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['dict_field']) |         self.assertEqual(doc._get_changed_fields(), ['dict_field']) | ||||||
|         self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {})) |         self.assertEqual(doc._delta(), ({'dict_field': dict_value}, {})) | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |         doc._changed_fields = [] | ||||||
|         list_value = ['1', 2, {'hello': 'world'}] |         list_value = ['1', 2, {'hello': 'world'}] | ||||||
|         doc.list_field = list_value |         doc.list_field = list_value | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['list_field']) |         self.assertEqual(doc._get_changed_fields(), ['list_field']) | ||||||
|         self.assertEquals(doc._delta(), ({'list_field': list_value}, {})) |         self.assertEqual(doc._delta(), ({'list_field': list_value}, {})) | ||||||
|  |  | ||||||
|         # Test unsetting |         # Test unsetting | ||||||
|         doc._changed_fields = [] |         doc._changed_fields = [] | ||||||
|         doc.dict_field = {} |         doc.dict_field = {} | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['dict_field']) |         self.assertEqual(doc._get_changed_fields(), ['dict_field']) | ||||||
|         self.assertEquals(doc._delta(), ({}, {'dict_field': 1})) |         self.assertEqual(doc._delta(), ({}, {'dict_field': 1})) | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |         doc._changed_fields = [] | ||||||
|         doc.list_field = [] |         doc.list_field = [] | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['list_field']) |         self.assertEqual(doc._get_changed_fields(), ['list_field']) | ||||||
|         self.assertEquals(doc._delta(), ({}, {'list_field': 1})) |         self.assertEqual(doc._delta(), ({}, {'list_field': 1})) | ||||||
|  |  | ||||||
|     def test_delta_recursive(self): |     def test_delta_recursive(self): | ||||||
|         """Testing deltaing works with dynamic documents""" |         """Testing deltaing works with dynamic documents""" | ||||||
| @@ -323,8 +323,8 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         doc.save() |         doc.save() | ||||||
|  |  | ||||||
|         doc = Doc.objects.first() |         doc = Doc.objects.first() | ||||||
|         self.assertEquals(doc._get_changed_fields(), []) |         self.assertEqual(doc._get_changed_fields(), []) | ||||||
|         self.assertEquals(doc._delta(), ({}, {})) |         self.assertEqual(doc._delta(), ({}, {})) | ||||||
|  |  | ||||||
|         embedded_1 = Embedded() |         embedded_1 = Embedded() | ||||||
|         embedded_1.string_field = 'hello' |         embedded_1.string_field = 'hello' | ||||||
| @@ -333,7 +333,7 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         embedded_1.list_field = ['1', 2, {'hello': 'world'}] |         embedded_1.list_field = ['1', 2, {'hello': 'world'}] | ||||||
|         doc.embedded_field = embedded_1 |         doc.embedded_field = embedded_1 | ||||||
|  |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field']) |         self.assertEqual(doc._get_changed_fields(), ['embedded_field']) | ||||||
|  |  | ||||||
|         embedded_delta = { |         embedded_delta = { | ||||||
|             'string_field': 'hello', |             'string_field': 'hello', | ||||||
| @@ -341,28 +341,28 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|             'dict_field': {'hello': 'world'}, |             'dict_field': {'hello': 'world'}, | ||||||
|             'list_field': ['1', 2, {'hello': 'world'}] |             'list_field': ['1', 2, {'hello': 'world'}] | ||||||
|         } |         } | ||||||
|         self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {})) |         self.assertEqual(doc.embedded_field._delta(), (embedded_delta, {})) | ||||||
|         embedded_delta.update({ |         embedded_delta.update({ | ||||||
|             '_types': ['Embedded'], |             '_types': ['Embedded'], | ||||||
|             '_cls': 'Embedded', |             '_cls': 'Embedded', | ||||||
|         }) |         }) | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {})) |         self.assertEqual(doc._delta(), ({'embedded_field': embedded_delta}, {})) | ||||||
|  |  | ||||||
|         doc.save() |         doc.save() | ||||||
|         doc.reload() |         doc.reload() | ||||||
|  |  | ||||||
|         doc.embedded_field.dict_field = {} |         doc.embedded_field.dict_field = {} | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field']) |         self.assertEqual(doc._get_changed_fields(), ['embedded_field.dict_field']) | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1})) |         self.assertEqual(doc.embedded_field._delta(), ({}, {'dict_field': 1})) | ||||||
|  |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1})) |         self.assertEqual(doc._delta(), ({}, {'embedded_field.dict_field': 1})) | ||||||
|         doc.save() |         doc.save() | ||||||
|         doc.reload() |         doc.reload() | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field = [] |         doc.embedded_field.list_field = [] | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) |         self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field']) | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1})) |         self.assertEqual(doc.embedded_field._delta(), ({}, {'list_field': 1})) | ||||||
|         self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1})) |         self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field': 1})) | ||||||
|         doc.save() |         doc.save() | ||||||
|         doc.reload() |         doc.reload() | ||||||
|  |  | ||||||
| @@ -373,8 +373,8 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         embedded_2.list_field = ['1', 2, {'hello': 'world'}] |         embedded_2.list_field = ['1', 2, {'hello': 'world'}] | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field = ['1', 2, embedded_2] |         doc.embedded_field.list_field = ['1', 2, embedded_2] | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) |         self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field']) | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({ |         self.assertEqual(doc.embedded_field._delta(), ({ | ||||||
|             'list_field': ['1', 2, { |             'list_field': ['1', 2, { | ||||||
|                 '_cls': 'Embedded', |                 '_cls': 'Embedded', | ||||||
|                 '_types': ['Embedded'], |                 '_types': ['Embedded'], | ||||||
| @@ -385,7 +385,7 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|             }] |             }] | ||||||
|         }, {})) |         }, {})) | ||||||
|  |  | ||||||
|         self.assertEquals(doc._delta(), ({ |         self.assertEqual(doc._delta(), ({ | ||||||
|             'embedded_field.list_field': ['1', 2, { |             'embedded_field.list_field': ['1', 2, { | ||||||
|                 '_cls': 'Embedded', |                 '_cls': 'Embedded', | ||||||
|                  '_types': ['Embedded'], |                  '_types': ['Embedded'], | ||||||
| @@ -398,25 +398,25 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         doc.save() |         doc.save() | ||||||
|         doc.reload() |         doc.reload() | ||||||
|  |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, []) |         self.assertEqual(doc.embedded_field.list_field[2]._changed_fields, []) | ||||||
|         self.assertEquals(doc.embedded_field.list_field[0], '1') |         self.assertEqual(doc.embedded_field.list_field[0], '1') | ||||||
|         self.assertEquals(doc.embedded_field.list_field[1], 2) |         self.assertEqual(doc.embedded_field.list_field[1], 2) | ||||||
|         for k in doc.embedded_field.list_field[2]._fields: |         for k in doc.embedded_field.list_field[2]._fields: | ||||||
|             self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k]) |             self.assertEqual(doc.embedded_field.list_field[2][k], embedded_2[k]) | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].string_field = 'world' |         doc.embedded_field.list_field[2].string_field = 'world' | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field']) |         self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field']) | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {})) |         self.assertEqual(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {})) | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {})) |         self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {})) | ||||||
|         doc.save() |         doc.save() | ||||||
|         doc.reload() |         doc.reload() | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world') |         self.assertEqual(doc.embedded_field.list_field[2].string_field, 'world') | ||||||
|  |  | ||||||
|         # Test multiple assignments |         # Test multiple assignments | ||||||
|         doc.embedded_field.list_field[2].string_field = 'hello world' |         doc.embedded_field.list_field[2].string_field = 'hello world' | ||||||
|         doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] |         doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) |         self.assertEqual(doc._get_changed_fields(), ['embedded_field.list_field']) | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({ |         self.assertEqual(doc.embedded_field._delta(), ({ | ||||||
|             'list_field': ['1', 2, { |             'list_field': ['1', 2, { | ||||||
|             '_types': ['Embedded'], |             '_types': ['Embedded'], | ||||||
|             '_cls': 'Embedded', |             '_cls': 'Embedded', | ||||||
| @@ -424,7 +424,7 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|             'int_field': 1, |             'int_field': 1, | ||||||
|             'list_field': ['1', 2, {'hello': 'world'}], |             'list_field': ['1', 2, {'hello': 'world'}], | ||||||
|             'dict_field': {'hello': 'world'}}]}, {})) |             'dict_field': {'hello': 'world'}}]}, {})) | ||||||
|         self.assertEquals(doc._delta(), ({ |         self.assertEqual(doc._delta(), ({ | ||||||
|             'embedded_field.list_field': ['1', 2, { |             'embedded_field.list_field': ['1', 2, { | ||||||
|                 '_types': ['Embedded'], |                 '_types': ['Embedded'], | ||||||
|                 '_cls': 'Embedded', |                 '_cls': 'Embedded', | ||||||
| @@ -435,32 +435,32 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|             ]}, {})) |             ]}, {})) | ||||||
|         doc.save() |         doc.save() | ||||||
|         doc.reload() |         doc.reload() | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world') |         self.assertEqual(doc.embedded_field.list_field[2].string_field, 'hello world') | ||||||
|  |  | ||||||
|         # Test list native methods |         # Test list native methods | ||||||
|         doc.embedded_field.list_field[2].list_field.pop(0) |         doc.embedded_field.list_field[2].list_field.pop(0) | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {})) |         self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {})) | ||||||
|         doc.save() |         doc.save() | ||||||
|         doc.reload() |         doc.reload() | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].list_field.append(1) |         doc.embedded_field.list_field[2].list_field.append(1) | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {})) |         self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {})) | ||||||
|         doc.save() |         doc.save() | ||||||
|         doc.reload() |         doc.reload() | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1]) |         self.assertEqual(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1]) | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].list_field.sort() |         doc.embedded_field.list_field[2].list_field.sort(key=str)# use str as a key to allow comparing uncomperable types | ||||||
|         doc.save() |         doc.save() | ||||||
|         doc.reload() |         doc.reload() | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}]) |         self.assertEqual(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}]) | ||||||
|  |  | ||||||
|         del(doc.embedded_field.list_field[2].list_field[2]['hello']) |         del(doc.embedded_field.list_field[2].list_field[2]['hello']) | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {})) |         self.assertEqual(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {})) | ||||||
|         doc.save() |         doc.save() | ||||||
|         doc.reload() |         doc.reload() | ||||||
|  |  | ||||||
|         del(doc.embedded_field.list_field[2].list_field) |         del(doc.embedded_field.list_field[2].list_field) | ||||||
|         self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1})) |         self.assertEqual(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1})) | ||||||
|  |  | ||||||
|         doc.save() |         doc.save() | ||||||
|         doc.reload() |         doc.reload() | ||||||
| @@ -470,8 +470,8 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         doc.reload() |         doc.reload() | ||||||
|  |  | ||||||
|         doc.dict_field['embedded'].string_field = 'Hello World' |         doc.dict_field['embedded'].string_field = 'Hello World' | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field']) |         self.assertEqual(doc._get_changed_fields(), ['dict_field.embedded.string_field']) | ||||||
|         self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {})) |         self.assertEqual(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {})) | ||||||
|  |  | ||||||
|     def test_indexes(self): |     def test_indexes(self): | ||||||
|         """Ensure that indexes are used when meta[indexes] is specified. |         """Ensure that indexes are used when meta[indexes] is specified. | ||||||
| @@ -500,3 +500,34 @@ class DynamicDocTest(unittest.TestCase): | |||||||
|         self.assertTrue([('_types', 1), ('category', 1), ('date', -1)] |         self.assertTrue([('_types', 1), ('category', 1), ('date', -1)] | ||||||
|                         in info) |                         in info) | ||||||
|         self.assertTrue([('_types', 1), ('date', -1)] in info) |         self.assertTrue([('_types', 1), ('date', -1)] in info) | ||||||
|  |  | ||||||
|  |     def test_dynamic_and_embedded(self): | ||||||
|  |         """Ensure embedded documents play nicely""" | ||||||
|  |  | ||||||
|  |         class Address(EmbeddedDocument): | ||||||
|  |             city = StringField() | ||||||
|  |  | ||||||
|  |         class Person(DynamicDocument): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         Person(name="Ross", address=Address(city="London")).save() | ||||||
|  |  | ||||||
|  |         person = Person.objects.first() | ||||||
|  |         person.address.city = "Lundenne" | ||||||
|  |         person.save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(Person.objects.first().address.city, "Lundenne") | ||||||
|  |  | ||||||
|  |         person = Person.objects.first() | ||||||
|  |         person.address = Address(city="Londinium") | ||||||
|  |         person.save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(Person.objects.first().address.city, "Londinium") | ||||||
|  |  | ||||||
|  |         person = Person.objects.first() | ||||||
|  |         person.age = 35 | ||||||
|  |         person.save() | ||||||
|  |         self.assertEqual(Person.objects.first().age, 35) | ||||||
|   | |||||||
| @@ -1,20 +1,24 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | from __future__ import with_statement | ||||||
| import datetime | import datetime | ||||||
| import os | import os | ||||||
| import unittest | import unittest | ||||||
| import uuid | import uuid | ||||||
| import StringIO |  | ||||||
| import tempfile | import tempfile | ||||||
| import gridfs |  | ||||||
|  |  | ||||||
| from decimal import Decimal | from decimal import Decimal | ||||||
|  |  | ||||||
|  | from bson import Binary, DBRef, ObjectId | ||||||
|  | import gridfs | ||||||
|  |  | ||||||
|  | from nose.plugins.skip import SkipTest | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.connection import get_db | from mongoengine.connection import get_db | ||||||
| from mongoengine.base import _document_registry, NotRegistered | from mongoengine.base import _document_registry, NotRegistered | ||||||
|  | from mongoengine.python_support import PY3, b, StringIO, bin_type | ||||||
|  |  | ||||||
| TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') | TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') | ||||||
|  |  | ||||||
|  |  | ||||||
| class FieldTest(unittest.TestCase): | class FieldTest(unittest.TestCase): | ||||||
|  |  | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
| @@ -123,10 +127,23 @@ class FieldTest(unittest.TestCase): | |||||||
|         self.assertEqual(ret.int_fld, None) |         self.assertEqual(ret.int_fld, None) | ||||||
|         self.assertEqual(ret.flt_fld, None) |         self.assertEqual(ret.flt_fld, None) | ||||||
|         # Return current time if retrived value is None. |         # Return current time if retrived value is None. | ||||||
|         self.assert_(isinstance(ret.comp_dt_fld, datetime.datetime)) |         self.assertTrue(isinstance(ret.comp_dt_fld, datetime.datetime)) | ||||||
|  |  | ||||||
|         self.assertRaises(ValidationError, ret.validate) |         self.assertRaises(ValidationError, ret.validate) | ||||||
|  |  | ||||||
|  |     def test_int_and_float_ne_operator(self): | ||||||
|  |         class TestDocument(Document): | ||||||
|  |             int_fld = IntField() | ||||||
|  |             float_fld = FloatField() | ||||||
|  |  | ||||||
|  |         TestDocument.drop_collection() | ||||||
|  |  | ||||||
|  |         TestDocument(int_fld=None, float_fld=None).save() | ||||||
|  |         TestDocument(int_fld=1, float_fld=1).save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(1, TestDocument.objects(int_fld__ne=None).count()) | ||||||
|  |         self.assertEqual(1, TestDocument.objects(float_fld__ne=None).count()) | ||||||
|  |  | ||||||
|     def test_object_id_validation(self): |     def test_object_id_validation(self): | ||||||
|         """Ensure that invalid values cannot be assigned to string fields. |         """Ensure that invalid values cannot be assigned to string fields. | ||||||
|         """ |         """ | ||||||
| @@ -258,25 +275,56 @@ class FieldTest(unittest.TestCase): | |||||||
|         person.admin = 'Yes' |         person.admin = 'Yes' | ||||||
|         self.assertRaises(ValidationError, person.validate) |         self.assertRaises(ValidationError, person.validate) | ||||||
|  |  | ||||||
|     def test_uuid_validation(self): |     def test_uuid_field_string(self): | ||||||
|         """Ensure that invalid values cannot be assigned to UUID fields. |         """Test UUID fields storing as String | ||||||
|         """ |         """ | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             api_key = UUIDField() |             api_key = UUIDField(binary=False) | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         uu = uuid.uuid4() | ||||||
|  |         Person(api_key=uu).save() | ||||||
|  |         self.assertEqual(1, Person.objects(api_key=uu).count()) | ||||||
|  |         self.assertEqual(uu, Person.objects.first().api_key) | ||||||
|  |  | ||||||
|         person = Person() |         person = Person() | ||||||
|         # any uuid type is valid |         valid = (uuid.uuid4(), uuid.uuid1()) | ||||||
|         person.api_key = uuid.uuid4() |         for api_key in valid: | ||||||
|         person.validate() |             person.api_key = api_key | ||||||
|         person.api_key = uuid.uuid1() |             person.validate() | ||||||
|         person.validate() |  | ||||||
|  |         invalid = ('9d159858-549b-4975-9f98-dd2f987c113g', | ||||||
|  |                    '9d159858-549b-4975-9f98-dd2f987c113') | ||||||
|  |         for api_key in invalid: | ||||||
|  |             person.api_key = api_key | ||||||
|  |             self.assertRaises(ValidationError, person.validate) | ||||||
|  |  | ||||||
|  |     def test_uuid_field_binary(self): | ||||||
|  |         """Test UUID fields storing as Binary object | ||||||
|  |         """ | ||||||
|  |         class Person(Document): | ||||||
|  |             api_key = UUIDField(binary=True) | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         uu = uuid.uuid4() | ||||||
|  |         Person(api_key=uu).save() | ||||||
|  |         self.assertEqual(1, Person.objects(api_key=uu).count()) | ||||||
|  |         self.assertEqual(uu, Person.objects.first().api_key) | ||||||
|  |  | ||||||
|  |         person = Person() | ||||||
|  |         valid = (uuid.uuid4(), uuid.uuid1()) | ||||||
|  |         for api_key in valid: | ||||||
|  |             person.api_key = api_key | ||||||
|  |             person.validate() | ||||||
|  |  | ||||||
|  |         invalid = ('9d159858-549b-4975-9f98-dd2f987c113g', | ||||||
|  |                    '9d159858-549b-4975-9f98-dd2f987c113') | ||||||
|  |         for api_key in invalid: | ||||||
|  |             person.api_key = api_key | ||||||
|  |             self.assertRaises(ValidationError, person.validate) | ||||||
|  |  | ||||||
|         # last g cannot belong to an hex number |  | ||||||
|         person.api_key = '9d159858-549b-4975-9f98-dd2f987c113g' |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|         # short strings don't validate |  | ||||||
|         person.api_key = '9d159858-549b-4975-9f98-dd2f987c113' |  | ||||||
|         self.assertRaises(ValidationError, person.validate) |  | ||||||
|  |  | ||||||
|     def test_datetime_validation(self): |     def test_datetime_validation(self): | ||||||
|         """Ensure that invalid values cannot be assigned to datetime fields. |         """Ensure that invalid values cannot be assigned to datetime fields. | ||||||
| @@ -313,7 +361,7 @@ class FieldTest(unittest.TestCase): | |||||||
|         log.date = datetime.date.today() |         log.date = datetime.date.today() | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertEquals(log.date.date(), datetime.date.today()) |         self.assertEqual(log.date.date(), datetime.date.today()) | ||||||
|  |  | ||||||
|         LogEntry.drop_collection() |         LogEntry.drop_collection() | ||||||
|  |  | ||||||
| @@ -324,8 +372,8 @@ class FieldTest(unittest.TestCase): | |||||||
|         log.date = d1 |         log.date = d1 | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertNotEquals(log.date, d1) |         self.assertNotEqual(log.date, d1) | ||||||
|         self.assertEquals(log.date, d2) |         self.assertEqual(log.date, d2) | ||||||
|  |  | ||||||
|         # Post UTC - microseconds are rounded (down) nearest millisecond |         # Post UTC - microseconds are rounded (down) nearest millisecond | ||||||
|         d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999) |         d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999) | ||||||
| @@ -333,35 +381,19 @@ class FieldTest(unittest.TestCase): | |||||||
|         log.date = d1 |         log.date = d1 | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertNotEquals(log.date, d1) |         self.assertNotEqual(log.date, d1) | ||||||
|         self.assertEquals(log.date, d2) |         self.assertEqual(log.date, d2) | ||||||
|  |  | ||||||
|         # Pre UTC dates microseconds below 1000 are dropped |         if not PY3: | ||||||
|         d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) |             # Pre UTC dates microseconds below 1000 are dropped | ||||||
|         d2 = datetime.datetime(1969, 12, 31, 23, 59, 59) |             # This does not seem to be true in PY3 | ||||||
|         log.date = d1 |             d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) | ||||||
|         log.save() |             d2 = datetime.datetime(1969, 12, 31, 23, 59, 59) | ||||||
|         log.reload() |  | ||||||
|         self.assertNotEquals(log.date, d1) |  | ||||||
|         self.assertEquals(log.date, d2) |  | ||||||
|  |  | ||||||
|         # Pre UTC microseconds above 1000 is wonky. |  | ||||||
|         # log.date has an invalid microsecond value so I can't construct |  | ||||||
|         # a date to compare. |  | ||||||
|         # |  | ||||||
|         # However, the timedelta is predicable with pre UTC timestamps |  | ||||||
|         # It always adds 16 seconds and [777216-776217] microseconds |  | ||||||
|         for i in xrange(1001, 3113, 33): |  | ||||||
|             d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, i) |  | ||||||
|             log.date = d1 |             log.date = d1 | ||||||
|             log.save() |             log.save() | ||||||
|             log.reload() |             log.reload() | ||||||
|             self.assertNotEquals(log.date, d1) |             self.assertNotEqual(log.date, d1) | ||||||
|  |             self.assertEqual(log.date, d2) | ||||||
|             delta = log.date - d1 |  | ||||||
|             self.assertEquals(delta.seconds, 16) |  | ||||||
|             microseconds = 777216 - (i % 1000) |  | ||||||
|             self.assertEquals(delta.microseconds, microseconds) |  | ||||||
|  |  | ||||||
|         LogEntry.drop_collection() |         LogEntry.drop_collection() | ||||||
|  |  | ||||||
| @@ -380,21 +412,21 @@ class FieldTest(unittest.TestCase): | |||||||
|         log.date = d1 |         log.date = d1 | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertEquals(log.date, d1) |         self.assertEqual(log.date, d1) | ||||||
|  |  | ||||||
|         # Post UTC - microseconds are rounded (down) nearest millisecond - with default datetimefields |         # Post UTC - microseconds are rounded (down) nearest millisecond - with default datetimefields | ||||||
|         d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999) |         d1 = datetime.datetime(1970, 01, 01, 00, 00, 01, 9999) | ||||||
|         log.date = d1 |         log.date = d1 | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertEquals(log.date, d1) |         self.assertEqual(log.date, d1) | ||||||
|  |  | ||||||
|         # Pre UTC dates microseconds below 1000 are dropped - with default datetimefields |         # Pre UTC dates microseconds below 1000 are dropped - with default datetimefields | ||||||
|         d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) |         d1 = datetime.datetime(1969, 12, 31, 23, 59, 59, 999) | ||||||
|         log.date = d1 |         log.date = d1 | ||||||
|         log.save() |         log.save() | ||||||
|         log.reload() |         log.reload() | ||||||
|         self.assertEquals(log.date, d1) |         self.assertEqual(log.date, d1) | ||||||
|  |  | ||||||
|         # Pre UTC microseconds above 1000 is wonky - with default datetimefields |         # Pre UTC microseconds above 1000 is wonky - with default datetimefields | ||||||
|         # log.date has an invalid microsecond value so I can't construct |         # log.date has an invalid microsecond value so I can't construct | ||||||
| @@ -404,7 +436,7 @@ class FieldTest(unittest.TestCase): | |||||||
|             log.date = d1 |             log.date = d1 | ||||||
|             log.save() |             log.save() | ||||||
|             log.reload() |             log.reload() | ||||||
|             self.assertEquals(log.date, d1) |             self.assertEqual(log.date, d1) | ||||||
|             log1 = LogEntry.objects.get(date=d1) |             log1 = LogEntry.objects.get(date=d1) | ||||||
|             self.assertEqual(log, log1) |             self.assertEqual(log, log1) | ||||||
|  |  | ||||||
| @@ -425,7 +457,7 @@ class FieldTest(unittest.TestCase): | |||||||
|         log.save() |         log.save() | ||||||
|  |  | ||||||
|         log1 = LogEntry.objects.get(date=d1) |         log1 = LogEntry.objects.get(date=d1) | ||||||
|         self.assertEquals(log, log1) |         self.assertEqual(log, log1) | ||||||
|  |  | ||||||
|         LogEntry.drop_collection() |         LogEntry.drop_collection() | ||||||
|  |  | ||||||
| @@ -613,13 +645,13 @@ class FieldTest(unittest.TestCase): | |||||||
|         post.info = [{'test': 3}] |         post.info = [{'test': 3}] | ||||||
|         post.save() |         post.save() | ||||||
|  |  | ||||||
|         self.assertEquals(BlogPost.objects.count(), 3) |         self.assertEqual(BlogPost.objects.count(), 3) | ||||||
|         self.assertEquals(BlogPost.objects.filter(info__exact='test').count(), 1) |         self.assertEqual(BlogPost.objects.filter(info__exact='test').count(), 1) | ||||||
|         self.assertEquals(BlogPost.objects.filter(info__0__test='test').count(), 1) |         self.assertEqual(BlogPost.objects.filter(info__0__test='test').count(), 1) | ||||||
|  |  | ||||||
|         # Confirm handles non strings or non existing keys |         # Confirm handles non strings or non existing keys | ||||||
|         self.assertEquals(BlogPost.objects.filter(info__0__test__exact='5').count(), 0) |         self.assertEqual(BlogPost.objects.filter(info__0__test__exact='5').count(), 0) | ||||||
|         self.assertEquals(BlogPost.objects.filter(info__100__test__exact='test').count(), 0) |         self.assertEqual(BlogPost.objects.filter(info__100__test__exact='test').count(), 0) | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|     def test_list_field_passed_in_value(self): |     def test_list_field_passed_in_value(self): | ||||||
| @@ -634,7 +666,7 @@ class FieldTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         foo = Foo(bars=[]) |         foo = Foo(bars=[]) | ||||||
|         foo.bars.append(bar) |         foo.bars.append(bar) | ||||||
|         self.assertEquals(repr(foo.bars), '[<Bar: Bar object>]') |         self.assertEqual(repr(foo.bars), '[<Bar: Bar object>]') | ||||||
|  |  | ||||||
|  |  | ||||||
|     def test_list_field_strict(self): |     def test_list_field_strict(self): | ||||||
| @@ -719,20 +751,20 @@ class FieldTest(unittest.TestCase): | |||||||
|         self.assertTrue(isinstance(e2.mapping[1], IntegerSetting)) |         self.assertTrue(isinstance(e2.mapping[1], IntegerSetting)) | ||||||
|  |  | ||||||
|         # Test querying |         # Test querying | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__1__value=42).count(), 1) |         self.assertEqual(Simple.objects.filter(mapping__1__value=42).count(), 1) | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__2__number=1).count(), 1) |         self.assertEqual(Simple.objects.filter(mapping__2__number=1).count(), 1) | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__2__complex__value=42).count(), 1) |         self.assertEqual(Simple.objects.filter(mapping__2__complex__value=42).count(), 1) | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__2__list__0__value=42).count(), 1) |         self.assertEqual(Simple.objects.filter(mapping__2__list__0__value=42).count(), 1) | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 1) |         self.assertEqual(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 1) | ||||||
|  |  | ||||||
|         # Confirm can update |         # Confirm can update | ||||||
|         Simple.objects().update(set__mapping__1=IntegerSetting(value=10)) |         Simple.objects().update(set__mapping__1=IntegerSetting(value=10)) | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__1__value=10).count(), 1) |         self.assertEqual(Simple.objects.filter(mapping__1__value=10).count(), 1) | ||||||
|  |  | ||||||
|         Simple.objects().update( |         Simple.objects().update( | ||||||
|             set__mapping__2__list__1=StringSetting(value='Boo')) |             set__mapping__2__list__1=StringSetting(value='Boo')) | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 0) |         self.assertEqual(Simple.objects.filter(mapping__2__list__1__value='foo').count(), 0) | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__2__list__1__value='Boo').count(), 1) |         self.assertEqual(Simple.objects.filter(mapping__2__list__1__value='Boo').count(), 1) | ||||||
|  |  | ||||||
|         Simple.drop_collection() |         Simple.drop_collection() | ||||||
|  |  | ||||||
| @@ -771,19 +803,19 @@ class FieldTest(unittest.TestCase): | |||||||
|         post.info = {'details': {'test': 3}} |         post.info = {'details': {'test': 3}} | ||||||
|         post.save() |         post.save() | ||||||
|  |  | ||||||
|         self.assertEquals(BlogPost.objects.count(), 3) |         self.assertEqual(BlogPost.objects.count(), 3) | ||||||
|         self.assertEquals(BlogPost.objects.filter(info__title__exact='test').count(), 1) |         self.assertEqual(BlogPost.objects.filter(info__title__exact='test').count(), 1) | ||||||
|         self.assertEquals(BlogPost.objects.filter(info__details__test__exact='test').count(), 1) |         self.assertEqual(BlogPost.objects.filter(info__details__test__exact='test').count(), 1) | ||||||
|  |  | ||||||
|         # Confirm handles non strings or non existing keys |         # Confirm handles non strings or non existing keys | ||||||
|         self.assertEquals(BlogPost.objects.filter(info__details__test__exact=5).count(), 0) |         self.assertEqual(BlogPost.objects.filter(info__details__test__exact=5).count(), 0) | ||||||
|         self.assertEquals(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0) |         self.assertEqual(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0) | ||||||
|  |  | ||||||
|         post = BlogPost.objects.create(info={'title': 'original'}) |         post = BlogPost.objects.create(info={'title': 'original'}) | ||||||
|         post.info.update({'title': 'updated'}) |         post.info.update({'title': 'updated'}) | ||||||
|         post.save() |         post.save() | ||||||
|         post.reload() |         post.reload() | ||||||
|         self.assertEquals('updated', post.info['title']) |         self.assertEqual('updated', post.info['title']) | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
| @@ -836,19 +868,19 @@ class FieldTest(unittest.TestCase): | |||||||
|         self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting)) |         self.assertTrue(isinstance(e2.mapping['someint'], IntegerSetting)) | ||||||
|  |  | ||||||
|         # Test querying |         # Test querying | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__someint__value=42).count(), 1) |         self.assertEqual(Simple.objects.filter(mapping__someint__value=42).count(), 1) | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__nested_dict__number=1).count(), 1) |         self.assertEqual(Simple.objects.filter(mapping__nested_dict__number=1).count(), 1) | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1) |         self.assertEqual(Simple.objects.filter(mapping__nested_dict__complex__value=42).count(), 1) | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1) |         self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__0__value=42).count(), 1) | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1) |         self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 1) | ||||||
|  |  | ||||||
|         # Confirm can update |         # Confirm can update | ||||||
|         Simple.objects().update( |         Simple.objects().update( | ||||||
|             set__mapping={"someint": IntegerSetting(value=10)}) |             set__mapping={"someint": IntegerSetting(value=10)}) | ||||||
|         Simple.objects().update( |         Simple.objects().update( | ||||||
|             set__mapping__nested_dict__list__1=StringSetting(value='Boo')) |             set__mapping__nested_dict__list__1=StringSetting(value='Boo')) | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0) |         self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__1__value='foo').count(), 0) | ||||||
|         self.assertEquals(Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1) |         self.assertEqual(Simple.objects.filter(mapping__nested_dict__list__1__value='Boo').count(), 1) | ||||||
|  |  | ||||||
|         Simple.drop_collection() |         Simple.drop_collection() | ||||||
|  |  | ||||||
| @@ -933,6 +965,19 @@ class FieldTest(unittest.TestCase): | |||||||
|         doc = self.db.test.find_one() |         doc = self.db.test.find_one() | ||||||
|         self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2) |         self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2) | ||||||
|  |  | ||||||
|  |     def test_map_field_lookup(self): | ||||||
|  |         """Ensure MapField lookups succeed on Fields without a lookup method""" | ||||||
|  |  | ||||||
|  |         class Log(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             visited = MapField(DateTimeField()) | ||||||
|  |  | ||||||
|  |         Log.drop_collection() | ||||||
|  |         Log(name="wilson", visited={'friends': datetime.datetime.now()}).save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(1, Log.objects( | ||||||
|  |                                 visited__friends__exists=True).count()) | ||||||
|  |  | ||||||
|     def test_embedded_db_field(self): |     def test_embedded_db_field(self): | ||||||
|  |  | ||||||
|         class Embedded(EmbeddedDocument): |         class Embedded(EmbeddedDocument): | ||||||
| @@ -1042,6 +1087,42 @@ class FieldTest(unittest.TestCase): | |||||||
|         User.drop_collection() |         User.drop_collection() | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_dbref_reference_fields(self): | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             parent = ReferenceField('self', dbref=True) | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         p1 = Person(name="John").save() | ||||||
|  |         Person(name="Ross", parent=p1).save() | ||||||
|  |  | ||||||
|  |         col = Person._get_collection() | ||||||
|  |         data = col.find_one({'name': 'Ross'}) | ||||||
|  |         self.assertEqual(data['parent'], DBRef('person', p1.pk)) | ||||||
|  |  | ||||||
|  |         p = Person.objects.get(name="Ross") | ||||||
|  |         self.assertEqual(p.parent, p1) | ||||||
|  |  | ||||||
|  |     def test_objectid_reference_fields(self): | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             parent = ReferenceField('self', dbref=False) | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         p1 = Person(name="John").save() | ||||||
|  |         Person(name="Ross", parent=p1).save() | ||||||
|  |  | ||||||
|  |         col = Person._get_collection() | ||||||
|  |         data = col.find_one({'name': 'Ross'}) | ||||||
|  |         self.assertEqual(data['parent'], p1.pk) | ||||||
|  |  | ||||||
|  |         p = Person.objects.get(name="Ross") | ||||||
|  |         self.assertEqual(p.parent, p1) | ||||||
|  |  | ||||||
|     def test_list_item_dereference(self): |     def test_list_item_dereference(self): | ||||||
|         """Ensure that DBRef items in ListFields are dereferenced. |         """Ensure that DBRef items in ListFields are dereferenced. | ||||||
|         """ |         """ | ||||||
| @@ -1078,6 +1159,7 @@ class FieldTest(unittest.TestCase): | |||||||
|             boss = ReferenceField('self') |             boss = ReferenceField('self') | ||||||
|             friends = ListField(ReferenceField('self')) |             friends = ListField(ReferenceField('self')) | ||||||
|  |  | ||||||
|  |         Employee.drop_collection() | ||||||
|         bill = Employee(name='Bill Lumbergh') |         bill = Employee(name='Bill Lumbergh') | ||||||
|         bill.save() |         bill.save() | ||||||
|  |  | ||||||
| @@ -1201,7 +1283,41 @@ class FieldTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
|             title = StringField() |             title = StringField() | ||||||
|             author = ReferenceField(Member) |             author = ReferenceField(Member, dbref=False) | ||||||
|  |  | ||||||
|  |         Member.drop_collection() | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |         m1 = Member(user_num=1) | ||||||
|  |         m1.save() | ||||||
|  |         m2 = Member(user_num=2) | ||||||
|  |         m2.save() | ||||||
|  |  | ||||||
|  |         post1 = BlogPost(title='post 1', author=m1) | ||||||
|  |         post1.save() | ||||||
|  |  | ||||||
|  |         post2 = BlogPost(title='post 2', author=m2) | ||||||
|  |         post2.save() | ||||||
|  |  | ||||||
|  |         post = BlogPost.objects(author=m1).first() | ||||||
|  |         self.assertEqual(post.id, post1.id) | ||||||
|  |  | ||||||
|  |         post = BlogPost.objects(author=m2).first() | ||||||
|  |         self.assertEqual(post.id, post2.id) | ||||||
|  |  | ||||||
|  |         Member.drop_collection() | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_reference_query_conversion_dbref(self): | ||||||
|  |         """Ensure that ReferenceFields can be queried using objects and values | ||||||
|  |         of the type of the primary key of the referenced object. | ||||||
|  |         """ | ||||||
|  |         class Member(Document): | ||||||
|  |             user_num = IntField(primary_key=True) | ||||||
|  |  | ||||||
|  |         class BlogPost(Document): | ||||||
|  |             title = StringField() | ||||||
|  |             author = ReferenceField(Member, dbref=True) | ||||||
|  |  | ||||||
|         Member.drop_collection() |         Member.drop_collection() | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
| @@ -1345,7 +1461,7 @@ class FieldTest(unittest.TestCase): | |||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|         Person(name="Wilson Jr").save() |         Person(name="Wilson Jr").save() | ||||||
|  |  | ||||||
|         self.assertEquals(repr(Person.objects(city=None)), |         self.assertEqual(repr(Person.objects(city=None)), | ||||||
|                             "[<Person: Person object>]") |                             "[<Person: Person object>]") | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -1423,7 +1539,7 @@ class FieldTest(unittest.TestCase): | |||||||
|             content_type = StringField() |             content_type = StringField() | ||||||
|             blob = BinaryField() |             blob = BinaryField() | ||||||
|  |  | ||||||
|         BLOB = '\xe6\x00\xc4\xff\x07' |         BLOB = b('\xe6\x00\xc4\xff\x07') | ||||||
|         MIME_TYPE = 'application/octet-stream' |         MIME_TYPE = 'application/octet-stream' | ||||||
|  |  | ||||||
|         Attachment.drop_collection() |         Attachment.drop_collection() | ||||||
| @@ -1433,7 +1549,7 @@ class FieldTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         attachment_1 = Attachment.objects().first() |         attachment_1 = Attachment.objects().first() | ||||||
|         self.assertEqual(MIME_TYPE, attachment_1.content_type) |         self.assertEqual(MIME_TYPE, attachment_1.content_type) | ||||||
|         self.assertEqual(BLOB, attachment_1.blob) |         self.assertEqual(BLOB, bin_type(attachment_1.blob)) | ||||||
|  |  | ||||||
|         Attachment.drop_collection() |         Attachment.drop_collection() | ||||||
|  |  | ||||||
| @@ -1460,18 +1576,30 @@ class FieldTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         attachment_required = AttachmentRequired() |         attachment_required = AttachmentRequired() | ||||||
|         self.assertRaises(ValidationError, attachment_required.validate) |         self.assertRaises(ValidationError, attachment_required.validate) | ||||||
|         attachment_required.blob = '\xe6\x00\xc4\xff\x07' |         attachment_required.blob = Binary(b('\xe6\x00\xc4\xff\x07')) | ||||||
|         attachment_required.validate() |         attachment_required.validate() | ||||||
|  |  | ||||||
|         attachment_size_limit = AttachmentSizeLimit(blob='\xe6\x00\xc4\xff\x07') |         attachment_size_limit = AttachmentSizeLimit(blob=b('\xe6\x00\xc4\xff\x07')) | ||||||
|         self.assertRaises(ValidationError, attachment_size_limit.validate) |         self.assertRaises(ValidationError, attachment_size_limit.validate) | ||||||
|         attachment_size_limit.blob = '\xe6\x00\xc4\xff' |         attachment_size_limit.blob = b('\xe6\x00\xc4\xff') | ||||||
|         attachment_size_limit.validate() |         attachment_size_limit.validate() | ||||||
|  |  | ||||||
|         Attachment.drop_collection() |         Attachment.drop_collection() | ||||||
|         AttachmentRequired.drop_collection() |         AttachmentRequired.drop_collection() | ||||||
|         AttachmentSizeLimit.drop_collection() |         AttachmentSizeLimit.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_binary_field_primary(self): | ||||||
|  |  | ||||||
|  |         class Attachment(Document): | ||||||
|  |             id = BinaryField(primary_key=True) | ||||||
|  |  | ||||||
|  |         Attachment.drop_collection() | ||||||
|  |  | ||||||
|  |         att = Attachment(id=uuid.uuid4().bytes).save() | ||||||
|  |         att.delete() | ||||||
|  |  | ||||||
|  |         self.assertEqual(0, Attachment.objects.count()) | ||||||
|  |  | ||||||
|     def test_choices_validation(self): |     def test_choices_validation(self): | ||||||
|         """Ensure that value is in a container of allowed values. |         """Ensure that value is in a container of allowed values. | ||||||
|         """ |         """ | ||||||
| @@ -1572,16 +1700,16 @@ class FieldTest(unittest.TestCase): | |||||||
|         """Ensure that file fields can be written to and their data retrieved |         """Ensure that file fields can be written to and their data retrieved | ||||||
|         """ |         """ | ||||||
|         class PutFile(Document): |         class PutFile(Document): | ||||||
|             file = FileField() |             the_file = FileField() | ||||||
|  |  | ||||||
|         class StreamFile(Document): |         class StreamFile(Document): | ||||||
|             file = FileField() |             the_file = FileField() | ||||||
|  |  | ||||||
|         class SetFile(Document): |         class SetFile(Document): | ||||||
|             file = FileField() |             the_file = FileField() | ||||||
|  |  | ||||||
|         text = 'Hello, World!' |         text = b('Hello, World!') | ||||||
|         more_text = 'Foo Bar' |         more_text = b('Foo Bar') | ||||||
|         content_type = 'text/plain' |         content_type = 'text/plain' | ||||||
|  |  | ||||||
|         PutFile.drop_collection() |         PutFile.drop_collection() | ||||||
| @@ -1589,68 +1717,68 @@ class FieldTest(unittest.TestCase): | |||||||
|         SetFile.drop_collection() |         SetFile.drop_collection() | ||||||
|  |  | ||||||
|         putfile = PutFile() |         putfile = PutFile() | ||||||
|         putfile.file.put(text, content_type=content_type) |         putfile.the_file.put(text, content_type=content_type) | ||||||
|         putfile.save() |         putfile.save() | ||||||
|         putfile.validate() |         putfile.validate() | ||||||
|         result = PutFile.objects.first() |         result = PutFile.objects.first() | ||||||
|         self.assertTrue(putfile == result) |         self.assertTrue(putfile == result) | ||||||
|         self.assertEquals(result.file.read(), text) |         self.assertEqual(result.the_file.read(), text) | ||||||
|         self.assertEquals(result.file.content_type, content_type) |         self.assertEqual(result.the_file.content_type, content_type) | ||||||
|         result.file.delete() # Remove file from GridFS |         result.the_file.delete() # Remove file from GridFS | ||||||
|         PutFile.objects.delete() |         PutFile.objects.delete() | ||||||
|  |  | ||||||
|         # Ensure file-like objects are stored |         # Ensure file-like objects are stored | ||||||
|         putfile = PutFile() |         putfile = PutFile() | ||||||
|         putstring = StringIO.StringIO() |         putstring = StringIO() | ||||||
|         putstring.write(text) |         putstring.write(text) | ||||||
|         putstring.seek(0) |         putstring.seek(0) | ||||||
|         putfile.file.put(putstring, content_type=content_type) |         putfile.the_file.put(putstring, content_type=content_type) | ||||||
|         putfile.save() |         putfile.save() | ||||||
|         putfile.validate() |         putfile.validate() | ||||||
|         result = PutFile.objects.first() |         result = PutFile.objects.first() | ||||||
|         self.assertTrue(putfile == result) |         self.assertTrue(putfile == result) | ||||||
|         self.assertEquals(result.file.read(), text) |         self.assertEqual(result.the_file.read(), text) | ||||||
|         self.assertEquals(result.file.content_type, content_type) |         self.assertEqual(result.the_file.content_type, content_type) | ||||||
|         result.file.delete() |         result.the_file.delete() | ||||||
|  |  | ||||||
|         streamfile = StreamFile() |         streamfile = StreamFile() | ||||||
|         streamfile.file.new_file(content_type=content_type) |         streamfile.the_file.new_file(content_type=content_type) | ||||||
|         streamfile.file.write(text) |         streamfile.the_file.write(text) | ||||||
|         streamfile.file.write(more_text) |         streamfile.the_file.write(more_text) | ||||||
|         streamfile.file.close() |         streamfile.the_file.close() | ||||||
|         streamfile.save() |         streamfile.save() | ||||||
|         streamfile.validate() |         streamfile.validate() | ||||||
|         result = StreamFile.objects.first() |         result = StreamFile.objects.first() | ||||||
|         self.assertTrue(streamfile == result) |         self.assertTrue(streamfile == result) | ||||||
|         self.assertEquals(result.file.read(), text + more_text) |         self.assertEqual(result.the_file.read(), text + more_text) | ||||||
|         self.assertEquals(result.file.content_type, content_type) |         self.assertEqual(result.the_file.content_type, content_type) | ||||||
|         result.file.seek(0) |         result.the_file.seek(0) | ||||||
|         self.assertEquals(result.file.tell(), 0) |         self.assertEqual(result.the_file.tell(), 0) | ||||||
|         self.assertEquals(result.file.read(len(text)), text) |         self.assertEqual(result.the_file.read(len(text)), text) | ||||||
|         self.assertEquals(result.file.tell(), len(text)) |         self.assertEqual(result.the_file.tell(), len(text)) | ||||||
|         self.assertEquals(result.file.read(len(more_text)), more_text) |         self.assertEqual(result.the_file.read(len(more_text)), more_text) | ||||||
|         self.assertEquals(result.file.tell(), len(text + more_text)) |         self.assertEqual(result.the_file.tell(), len(text + more_text)) | ||||||
|         result.file.delete() |         result.the_file.delete() | ||||||
|  |  | ||||||
|         # Ensure deleted file returns None |         # Ensure deleted file returns None | ||||||
|         self.assertTrue(result.file.read() == None) |         self.assertTrue(result.the_file.read() == None) | ||||||
|  |  | ||||||
|         setfile = SetFile() |         setfile = SetFile() | ||||||
|         setfile.file = text |         setfile.the_file = text | ||||||
|         setfile.save() |         setfile.save() | ||||||
|         setfile.validate() |         setfile.validate() | ||||||
|         result = SetFile.objects.first() |         result = SetFile.objects.first() | ||||||
|         self.assertTrue(setfile == result) |         self.assertTrue(setfile == result) | ||||||
|         self.assertEquals(result.file.read(), text) |         self.assertEqual(result.the_file.read(), text) | ||||||
|  |  | ||||||
|         # Try replacing file with new one |         # Try replacing file with new one | ||||||
|         result.file.replace(more_text) |         result.the_file.replace(more_text) | ||||||
|         result.save() |         result.save() | ||||||
|         result.validate() |         result.validate() | ||||||
|         result = SetFile.objects.first() |         result = SetFile.objects.first() | ||||||
|         self.assertTrue(setfile == result) |         self.assertTrue(setfile == result) | ||||||
|         self.assertEquals(result.file.read(), more_text) |         self.assertEqual(result.the_file.read(), more_text) | ||||||
|         result.file.delete() |         result.the_file.delete() | ||||||
|  |  | ||||||
|         PutFile.drop_collection() |         PutFile.drop_collection() | ||||||
|         StreamFile.drop_collection() |         StreamFile.drop_collection() | ||||||
| @@ -1658,7 +1786,7 @@ class FieldTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         # Make sure FileField is optional and not required |         # Make sure FileField is optional and not required | ||||||
|         class DemoFile(Document): |         class DemoFile(Document): | ||||||
|             file = FileField() |             the_file = FileField() | ||||||
|         DemoFile.objects.create() |         DemoFile.objects.create() | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -1670,7 +1798,7 @@ class FieldTest(unittest.TestCase): | |||||||
|         GridDocument.drop_collection() |         GridDocument.drop_collection() | ||||||
|  |  | ||||||
|         with tempfile.TemporaryFile() as f: |         with tempfile.TemporaryFile() as f: | ||||||
|             f.write("Hello World!") |             f.write(b("Hello World!")) | ||||||
|             f.flush() |             f.flush() | ||||||
|  |  | ||||||
|             # Test without default |             # Test without default | ||||||
| @@ -1681,48 +1809,48 @@ class FieldTest(unittest.TestCase): | |||||||
|             doc_b = GridDocument.objects.with_id(doc_a.id) |             doc_b = GridDocument.objects.with_id(doc_a.id) | ||||||
|             doc_b.the_file.replace(f, filename='doc_b') |             doc_b.the_file.replace(f, filename='doc_b') | ||||||
|             doc_b.save() |             doc_b.save() | ||||||
|             self.assertNotEquals(doc_b.the_file.grid_id, None) |             self.assertNotEqual(doc_b.the_file.grid_id, None) | ||||||
|  |  | ||||||
|             # Test it matches |             # Test it matches | ||||||
|             doc_c = GridDocument.objects.with_id(doc_b.id) |             doc_c = GridDocument.objects.with_id(doc_b.id) | ||||||
|             self.assertEquals(doc_b.the_file.grid_id, doc_c.the_file.grid_id) |             self.assertEqual(doc_b.the_file.grid_id, doc_c.the_file.grid_id) | ||||||
|  |  | ||||||
|             # Test with default |             # Test with default | ||||||
|             doc_d = GridDocument(the_file='') |             doc_d = GridDocument(the_file=b('')) | ||||||
|             doc_d.save() |             doc_d.save() | ||||||
|  |  | ||||||
|             doc_e = GridDocument.objects.with_id(doc_d.id) |             doc_e = GridDocument.objects.with_id(doc_d.id) | ||||||
|             self.assertEquals(doc_d.the_file.grid_id, doc_e.the_file.grid_id) |             self.assertEqual(doc_d.the_file.grid_id, doc_e.the_file.grid_id) | ||||||
|  |  | ||||||
|             doc_e.the_file.replace(f, filename='doc_e') |             doc_e.the_file.replace(f, filename='doc_e') | ||||||
|             doc_e.save() |             doc_e.save() | ||||||
|  |  | ||||||
|             doc_f = GridDocument.objects.with_id(doc_e.id) |             doc_f = GridDocument.objects.with_id(doc_e.id) | ||||||
|             self.assertEquals(doc_e.the_file.grid_id, doc_f.the_file.grid_id) |             self.assertEqual(doc_e.the_file.grid_id, doc_f.the_file.grid_id) | ||||||
|  |  | ||||||
|         db = GridDocument._get_db() |         db = GridDocument._get_db() | ||||||
|         grid_fs = gridfs.GridFS(db) |         grid_fs = gridfs.GridFS(db) | ||||||
|         self.assertEquals(['doc_b', 'doc_e'], grid_fs.list()) |         self.assertEqual(['doc_b', 'doc_e'], grid_fs.list()) | ||||||
|  |  | ||||||
|     def test_file_uniqueness(self): |     def test_file_uniqueness(self): | ||||||
|         """Ensure that each instance of a FileField is unique |         """Ensure that each instance of a FileField is unique | ||||||
|         """ |         """ | ||||||
|         class TestFile(Document): |         class TestFile(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             file = FileField() |             the_file = FileField() | ||||||
|  |  | ||||||
|         # First instance |         # First instance | ||||||
|         testfile = TestFile() |         test_file = TestFile() | ||||||
|         testfile.name = "Hello, World!" |         test_file.name = "Hello, World!" | ||||||
|         testfile.file.put('Hello, World!') |         test_file.the_file.put(b('Hello, World!')) | ||||||
|         testfile.save() |         test_file.save() | ||||||
|  |  | ||||||
|         # Second instance |         # Second instance | ||||||
|         testfiledupe = TestFile() |         test_file_dupe = TestFile() | ||||||
|         data = testfiledupe.file.read() # Should be None |         data = test_file_dupe.the_file.read() # Should be None | ||||||
|  |  | ||||||
|         self.assertTrue(testfile.name != testfiledupe.name) |         self.assertTrue(test_file.name != test_file_dupe.name) | ||||||
|         self.assertTrue(testfile.file.read() != data) |         self.assertTrue(test_file.the_file.read() != data) | ||||||
|  |  | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
|  |  | ||||||
| @@ -1730,18 +1858,28 @@ class FieldTest(unittest.TestCase): | |||||||
|         """Ensure that a boolean test of a FileField indicates its presence |         """Ensure that a boolean test of a FileField indicates its presence | ||||||
|         """ |         """ | ||||||
|         class TestFile(Document): |         class TestFile(Document): | ||||||
|             file = FileField() |             the_file = FileField() | ||||||
|  |  | ||||||
|         testfile = TestFile() |         test_file = TestFile() | ||||||
|         self.assertFalse(bool(testfile.file)) |         self.assertFalse(bool(test_file.the_file)) | ||||||
|         testfile.file = 'Hello, World!' |         test_file.the_file = b('Hello, World!') | ||||||
|         testfile.file.content_type = 'text/plain' |         test_file.the_file.content_type = 'text/plain' | ||||||
|         testfile.save() |         test_file.save() | ||||||
|         self.assertTrue(bool(testfile.file)) |         self.assertTrue(bool(test_file.the_file)) | ||||||
|  |  | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_file_cmp(self): | ||||||
|  |         """Test comparing against other types""" | ||||||
|  |         class TestFile(Document): | ||||||
|  |             the_file = FileField() | ||||||
|  |  | ||||||
|  |         test_file = TestFile() | ||||||
|  |         self.assertFalse(test_file.the_file in [{"test": 1}]) | ||||||
|  |  | ||||||
|     def test_image_field(self): |     def test_image_field(self): | ||||||
|  |         if PY3: | ||||||
|  |             raise SkipTest('PIL does not have Python 3 support') | ||||||
|  |  | ||||||
|         class TestImage(Document): |         class TestImage(Document): | ||||||
|             image = ImageField() |             image = ImageField() | ||||||
| @@ -1754,15 +1892,17 @@ class FieldTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
|  |  | ||||||
|         self.assertEquals(t.image.format, 'PNG') |         self.assertEqual(t.image.format, 'PNG') | ||||||
|  |  | ||||||
|         w, h = t.image.size |         w, h = t.image.size | ||||||
|         self.assertEquals(w, 371) |         self.assertEqual(w, 371) | ||||||
|         self.assertEquals(h, 76) |         self.assertEqual(h, 76) | ||||||
|  |  | ||||||
|         t.image.delete() |         t.image.delete() | ||||||
|  |  | ||||||
|     def test_image_field_resize(self): |     def test_image_field_resize(self): | ||||||
|  |         if PY3: | ||||||
|  |             raise SkipTest('PIL does not have Python 3 support') | ||||||
|  |  | ||||||
|         class TestImage(Document): |         class TestImage(Document): | ||||||
|             image = ImageField(size=(185, 37)) |             image = ImageField(size=(185, 37)) | ||||||
| @@ -1775,15 +1915,40 @@ class FieldTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
|  |  | ||||||
|         self.assertEquals(t.image.format, 'PNG') |         self.assertEqual(t.image.format, 'PNG') | ||||||
|         w, h = t.image.size |         w, h = t.image.size | ||||||
|  |  | ||||||
|         self.assertEquals(w, 185) |         self.assertEqual(w, 185) | ||||||
|         self.assertEquals(h, 37) |         self.assertEqual(h, 37) | ||||||
|  |  | ||||||
|  |         t.image.delete() | ||||||
|  |  | ||||||
|  |     def test_image_field_resize_force(self): | ||||||
|  |         if PY3: | ||||||
|  |             raise SkipTest('PIL does not have Python 3 support') | ||||||
|  |  | ||||||
|  |         class TestImage(Document): | ||||||
|  |             image = ImageField(size=(185, 37, True)) | ||||||
|  |  | ||||||
|  |         TestImage.drop_collection() | ||||||
|  |  | ||||||
|  |         t = TestImage() | ||||||
|  |         t.image.put(open(TEST_IMAGE_PATH, 'r')) | ||||||
|  |         t.save() | ||||||
|  |  | ||||||
|  |         t = TestImage.objects.first() | ||||||
|  |  | ||||||
|  |         self.assertEqual(t.image.format, 'PNG') | ||||||
|  |         w, h = t.image.size | ||||||
|  |  | ||||||
|  |         self.assertEqual(w, 185) | ||||||
|  |         self.assertEqual(h, 37) | ||||||
|  |  | ||||||
|         t.image.delete() |         t.image.delete() | ||||||
|  |  | ||||||
|     def test_image_field_thumbnail(self): |     def test_image_field_thumbnail(self): | ||||||
|  |         if PY3: | ||||||
|  |             raise SkipTest('PIL does not have Python 3 support') | ||||||
|  |  | ||||||
|         class TestImage(Document): |         class TestImage(Document): | ||||||
|             image = ImageField(thumbnail_size=(92, 18)) |             image = ImageField(thumbnail_size=(92, 18)) | ||||||
| @@ -1796,39 +1961,38 @@ class FieldTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         t = TestImage.objects.first() |         t = TestImage.objects.first() | ||||||
|  |  | ||||||
|         self.assertEquals(t.image.thumbnail.format, 'PNG') |         self.assertEqual(t.image.thumbnail.format, 'PNG') | ||||||
|         self.assertEquals(t.image.thumbnail.width, 92) |         self.assertEqual(t.image.thumbnail.width, 92) | ||||||
|         self.assertEquals(t.image.thumbnail.height, 18) |         self.assertEqual(t.image.thumbnail.height, 18) | ||||||
|  |  | ||||||
|         t.image.delete() |         t.image.delete() | ||||||
|  |  | ||||||
|  |  | ||||||
|     def test_file_multidb(self): |     def test_file_multidb(self): | ||||||
|         register_connection('testfiles', 'testfiles') |         register_connection('test_files', 'test_files') | ||||||
|         class TestFile(Document): |         class TestFile(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             file = FileField(db_alias="testfiles", |             the_file = FileField(db_alias="test_files", | ||||||
|                              collection_name="macumba") |                                  collection_name="macumba") | ||||||
|  |  | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
|  |  | ||||||
|         # delete old filesystem |         # delete old filesystem | ||||||
|         get_db("testfiles").macumba.files.drop() |         get_db("test_files").macumba.files.drop() | ||||||
|         get_db("testfiles").macumba.chunks.drop() |         get_db("test_files").macumba.chunks.drop() | ||||||
|  |  | ||||||
|         # First instance |         # First instance | ||||||
|         testfile = TestFile() |         test_file = TestFile() | ||||||
|         testfile.name = "Hello, World!" |         test_file.name = "Hello, World!" | ||||||
|         testfile.file.put('Hello, World!', |         test_file.the_file.put(b('Hello, World!'), | ||||||
|                           name="hello.txt") |                           name="hello.txt") | ||||||
|         testfile.save() |         test_file.save() | ||||||
|  |  | ||||||
|         data = get_db("testfiles").macumba.files.find_one() |         data = get_db("test_files").macumba.files.find_one() | ||||||
|         self.assertEquals(data.get('name'), 'hello.txt') |         self.assertEqual(data.get('name'), 'hello.txt') | ||||||
|  |  | ||||||
|         testfile = TestFile.objects.first() |         test_file = TestFile.objects.first() | ||||||
|         self.assertEquals(testfile.file.read(), |         self.assertEqual(test_file.the_file.read(), | ||||||
|                           'Hello, World!') |                           b('Hello, World!')) | ||||||
|  |  | ||||||
|     def test_geo_indexes(self): |     def test_geo_indexes(self): | ||||||
|         """Ensure that indexes are created automatically for GeoPointFields. |         """Ensure that indexes are created automatically for GeoPointFields. | ||||||
| @@ -1903,6 +2067,27 @@ class FieldTest(unittest.TestCase): | |||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) |         c = self.db['mongoengine.counters'].find_one({'_id': 'person.id'}) | ||||||
|         self.assertEqual(c['next'], 10) |         self.assertEqual(c['next'], 10) | ||||||
|  |  | ||||||
|  |     def test_sequence_field_sequence_name(self): | ||||||
|  |         class Person(Document): | ||||||
|  |             id = SequenceField(primary_key=True, sequence_name='jelly') | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         self.db['mongoengine.counters'].drop() | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         for x in xrange(10): | ||||||
|  |             p = Person(name="Person %s" % x) | ||||||
|  |             p.save() | ||||||
|  |  | ||||||
|  |         c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) | ||||||
|  |         self.assertEqual(c['next'], 10) | ||||||
|  |  | ||||||
|  |         ids = [i.id for i in Person.objects] | ||||||
|  |         self.assertEqual(ids, range(1, 11)) | ||||||
|  |  | ||||||
|  |         c = self.db['mongoengine.counters'].find_one({'_id': 'jelly.id'}) | ||||||
|  |         self.assertEqual(c['next'], 10) | ||||||
|  |  | ||||||
|     def test_multiple_sequence_fields(self): |     def test_multiple_sequence_fields(self): | ||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             id = SequenceField(primary_key=True) |             id = SequenceField(primary_key=True) | ||||||
| @@ -2089,27 +2274,27 @@ class FieldTest(unittest.TestCase): | |||||||
|         post.comments.append(Comment(content='hello', author=bob)) |         post.comments.append(Comment(content='hello', author=bob)) | ||||||
|         post.comments.append(Comment(author=bob)) |         post.comments.append(Comment(author=bob)) | ||||||
|  |  | ||||||
|  |         self.assertRaises(ValidationError, post.validate) | ||||||
|         try: |         try: | ||||||
|             post.validate() |             post.validate() | ||||||
|         except ValidationError, error: |         except ValidationError, error: | ||||||
|             pass |             # ValidationError.errors property | ||||||
|  |             self.assertTrue(hasattr(error, 'errors')) | ||||||
|  |             self.assertTrue(isinstance(error.errors, dict)) | ||||||
|  |             self.assertTrue('comments' in error.errors) | ||||||
|  |             self.assertTrue(1 in error.errors['comments']) | ||||||
|  |             self.assertTrue(isinstance(error.errors['comments'][1]['content'], | ||||||
|  |                             ValidationError)) | ||||||
|  |  | ||||||
|         # ValidationError.errors property |             # ValidationError.schema property | ||||||
|         self.assertTrue(hasattr(error, 'errors')) |             error_dict = error.to_dict() | ||||||
|         self.assertTrue(isinstance(error.errors, dict)) |             self.assertTrue(isinstance(error_dict, dict)) | ||||||
|         self.assertTrue('comments' in error.errors) |             self.assertTrue('comments' in error_dict) | ||||||
|         self.assertTrue(1 in error.errors['comments']) |             self.assertTrue(1 in error_dict['comments']) | ||||||
|         self.assertTrue(isinstance(error.errors['comments'][1]['content'], |             self.assertTrue('content' in error_dict['comments'][1]) | ||||||
|                         ValidationError)) |             self.assertEqual(error_dict['comments'][1]['content'], | ||||||
|  |                               u'Field is required') | ||||||
|  |  | ||||||
|         # ValidationError.schema property |  | ||||||
|         error_dict = error.to_dict() |  | ||||||
|         self.assertTrue(isinstance(error_dict, dict)) |  | ||||||
|         self.assertTrue('comments' in error_dict) |  | ||||||
|         self.assertTrue(1 in error_dict['comments']) |  | ||||||
|         self.assertTrue('content' in error_dict['comments'][1]) |  | ||||||
|         self.assertEquals(error_dict['comments'][1]['content'], |  | ||||||
|                           u'Field is required ("content")') |  | ||||||
|  |  | ||||||
|         post.comments[1].content = 'here we go' |         post.comments[1].content = 'here we go' | ||||||
|         post.validate() |         post.validate() | ||||||
|   | |||||||
| @@ -1,16 +1,19 @@ | |||||||
| # -*- coding: utf-8 -*- | from __future__ import with_statement | ||||||
| import unittest | import unittest | ||||||
| import pymongo |  | ||||||
| from bson import ObjectId |  | ||||||
| from datetime import datetime, timedelta | from datetime import datetime, timedelta | ||||||
|  |  | ||||||
|  | import pymongo | ||||||
|  |  | ||||||
|  | from bson import ObjectId | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.connection import get_connection | ||||||
|  | from mongoengine.python_support import PY3 | ||||||
|  | from mongoengine.tests import query_counter | ||||||
| from mongoengine.queryset import (QuerySet, QuerySetManager, | from mongoengine.queryset import (QuerySet, QuerySetManager, | ||||||
|                                   MultipleObjectsReturned, DoesNotExist, |                                   MultipleObjectsReturned, DoesNotExist, | ||||||
|                                   QueryFieldList) |                                   QueryFieldList) | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.connection import get_connection |  | ||||||
| from mongoengine.tests import query_counter |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class QuerySetTest(unittest.TestCase): | class QuerySetTest(unittest.TestCase): | ||||||
|  |  | ||||||
| @@ -21,6 +24,8 @@ class QuerySetTest(unittest.TestCase): | |||||||
|             name = StringField() |             name = StringField() | ||||||
|             age = IntField() |             age = IntField() | ||||||
|             meta = {'allow_inheritance': True} |             meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|         self.Person = Person |         self.Person = Person | ||||||
|  |  | ||||||
|     def test_initialisation(self): |     def test_initialisation(self): | ||||||
| @@ -225,6 +230,30 @@ class QuerySetTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Blog.drop_collection() |         Blog.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_chaining(self): | ||||||
|  |         class A(Document): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class B(Document): | ||||||
|  |             a = ReferenceField(A) | ||||||
|  |  | ||||||
|  |         A.drop_collection() | ||||||
|  |         B.drop_collection() | ||||||
|  |  | ||||||
|  |         a1 = A().save() | ||||||
|  |         a2 = A().save() | ||||||
|  |  | ||||||
|  |         B(a=a1).save() | ||||||
|  |  | ||||||
|  |         # Works | ||||||
|  |         q1 = B.objects.filter(a__in=[a1, a2], a=a1)._query | ||||||
|  |  | ||||||
|  |         # Doesn't work | ||||||
|  |         q2 = B.objects.filter(a__in=[a1, a2]) | ||||||
|  |         q2 = q2.filter(a=a1)._query | ||||||
|  |  | ||||||
|  |         self.assertEqual(q1, q2) | ||||||
|  |  | ||||||
|     def test_update_write_options(self): |     def test_update_write_options(self): | ||||||
|         """Test that passing write_options works""" |         """Test that passing write_options works""" | ||||||
|  |  | ||||||
| @@ -239,11 +268,11 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         self.Person.objects.update(set__name='Ross', write_options=write_options) |         self.Person.objects.update(set__name='Ross', write_options=write_options) | ||||||
|  |  | ||||||
|         author = self.Person.objects.first() |         author = self.Person.objects.first() | ||||||
|         self.assertEquals(author.name, 'Ross') |         self.assertEqual(author.name, 'Ross') | ||||||
|  |  | ||||||
|         self.Person.objects.update_one(set__name='Test User', write_options=write_options) |         self.Person.objects.update_one(set__name='Test User', write_options=write_options) | ||||||
|         author = self.Person.objects.first() |         author = self.Person.objects.first() | ||||||
|         self.assertEquals(author.name, 'Test User') |         self.assertEqual(author.name, 'Test User') | ||||||
|  |  | ||||||
|     def test_update_update_has_a_value(self): |     def test_update_update_has_a_value(self): | ||||||
|         """Test to ensure that update is passed a value to update to""" |         """Test to ensure that update is passed a value to update to""" | ||||||
| @@ -332,8 +361,8 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         BlogPost.objects(comments__by="jane").update(inc__comments__S__votes=1) |         BlogPost.objects(comments__by="jane").update(inc__comments__S__votes=1) | ||||||
|  |  | ||||||
|         post = BlogPost.objects.first() |         post = BlogPost.objects.first() | ||||||
|         self.assertEquals(post.comments[1].by, 'jane') |         self.assertEqual(post.comments[1].by, 'jane') | ||||||
|         self.assertEquals(post.comments[1].votes, 8) |         self.assertEqual(post.comments[1].votes, 8) | ||||||
|  |  | ||||||
|         # Currently the $ operator only applies to the first matched item in |         # Currently the $ operator only applies to the first matched item in | ||||||
|         # the query |         # the query | ||||||
| @@ -346,7 +375,7 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         Simple.objects(x=2).update(inc__x__S=1) |         Simple.objects(x=2).update(inc__x__S=1) | ||||||
|  |  | ||||||
|         simple = Simple.objects.first() |         simple = Simple.objects.first() | ||||||
|         self.assertEquals(simple.x, [1, 3, 3, 2]) |         self.assertEqual(simple.x, [1, 3, 3, 2]) | ||||||
|         Simple.drop_collection() |         Simple.drop_collection() | ||||||
|  |  | ||||||
|         # You can set multiples |         # You can set multiples | ||||||
| @@ -358,10 +387,10 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         Simple.objects(x=3).update(set__x__S=0) |         Simple.objects(x=3).update(set__x__S=0) | ||||||
|  |  | ||||||
|         s = Simple.objects() |         s = Simple.objects() | ||||||
|         self.assertEquals(s[0].x, [1, 2, 0, 4]) |         self.assertEqual(s[0].x, [1, 2, 0, 4]) | ||||||
|         self.assertEquals(s[1].x, [2, 0, 4, 5]) |         self.assertEqual(s[1].x, [2, 0, 4, 5]) | ||||||
|         self.assertEquals(s[2].x, [0, 4, 5, 6]) |         self.assertEqual(s[2].x, [0, 4, 5, 6]) | ||||||
|         self.assertEquals(s[3].x, [4, 5, 6, 7]) |         self.assertEqual(s[3].x, [4, 5, 6, 7]) | ||||||
|  |  | ||||||
|         # Using "$unset" with an expression like this "array.$" will result in |         # Using "$unset" with an expression like this "array.$" will result in | ||||||
|         # the array item becoming None, not being removed. |         # the array item becoming None, not being removed. | ||||||
| @@ -369,14 +398,14 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         Simple(x=[1, 2, 3, 4, 3, 2, 3, 4]).save() |         Simple(x=[1, 2, 3, 4, 3, 2, 3, 4]).save() | ||||||
|         Simple.objects(x=3).update(unset__x__S=1) |         Simple.objects(x=3).update(unset__x__S=1) | ||||||
|         simple = Simple.objects.first() |         simple = Simple.objects.first() | ||||||
|         self.assertEquals(simple.x, [1, 2, None, 4, 3, 2, 3, 4]) |         self.assertEqual(simple.x, [1, 2, None, 4, 3, 2, 3, 4]) | ||||||
|  |  | ||||||
|         # Nested updates arent supported yet.. |         # Nested updates arent supported yet.. | ||||||
|         def update_nested(): |         def update_nested(): | ||||||
|             Simple.drop_collection() |             Simple.drop_collection() | ||||||
|             Simple(x=[{'test': [1, 2, 3, 4]}]).save() |             Simple(x=[{'test': [1, 2, 3, 4]}]).save() | ||||||
|             Simple.objects(x__test=2).update(set__x__S__test__S=3) |             Simple.objects(x__test=2).update(set__x__S__test__S=3) | ||||||
|             self.assertEquals(simple.x, [1, 2, 3, 4]) |             self.assertEqual(simple.x, [1, 2, 3, 4]) | ||||||
|  |  | ||||||
|         self.assertRaises(OperationError, update_nested) |         self.assertRaises(OperationError, update_nested) | ||||||
|         Simple.drop_collection() |         Simple.drop_collection() | ||||||
| @@ -406,8 +435,32 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         BlogPost.objects(comments__by="joe").update(set__comments__S__votes=Vote(score=4)) |         BlogPost.objects(comments__by="joe").update(set__comments__S__votes=Vote(score=4)) | ||||||
|  |  | ||||||
|         post = BlogPost.objects.first() |         post = BlogPost.objects.first() | ||||||
|         self.assertEquals(post.comments[0].by, 'joe') |         self.assertEqual(post.comments[0].by, 'joe') | ||||||
|         self.assertEquals(post.comments[0].votes.score, 4) |         self.assertEqual(post.comments[0].votes.score, 4) | ||||||
|  |  | ||||||
|  |     def test_updates_can_have_match_operators(self): | ||||||
|  |  | ||||||
|  |         class Post(Document): | ||||||
|  |             title = StringField(required=True) | ||||||
|  |             tags = ListField(StringField()) | ||||||
|  |             comments = ListField(EmbeddedDocumentField("Comment")) | ||||||
|  |  | ||||||
|  |         class Comment(EmbeddedDocument): | ||||||
|  |             content = StringField() | ||||||
|  |             name = StringField(max_length=120) | ||||||
|  |             vote = IntField() | ||||||
|  |  | ||||||
|  |         Post.drop_collection() | ||||||
|  |  | ||||||
|  |         comm1 = Comment(content="very funny indeed", name="John S", vote=1) | ||||||
|  |         comm2 = Comment(content="kind of funny", name="Mark P", vote=0) | ||||||
|  |  | ||||||
|  |         Post(title='Fun with MongoEngine', tags=['mongodb', 'mongoengine'], | ||||||
|  |              comments=[comm1, comm2]).save() | ||||||
|  |  | ||||||
|  |         Post.objects().update_one(pull__comments__vote__lt=1) | ||||||
|  |  | ||||||
|  |         self.assertEqual(1, len(Post.objects.first().comments)) | ||||||
|  |  | ||||||
|     def test_mapfield_update(self): |     def test_mapfield_update(self): | ||||||
|         """Ensure that the MapField can be updated.""" |         """Ensure that the MapField can be updated.""" | ||||||
| @@ -496,7 +549,7 @@ class QuerySetTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Blog.drop_collection() |         Blog.drop_collection() | ||||||
|  |  | ||||||
|         # Recreates the collection |         # Recreates the collection | ||||||
|         self.assertEqual(0, Blog.objects.count()) |         self.assertEqual(0, Blog.objects.count()) | ||||||
|  |  | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
| @@ -561,7 +614,7 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         Blog.drop_collection() |         Blog.drop_collection() | ||||||
|         blog1 = Blog(title="code", posts=[post1, post2]) |         blog1 = Blog(title="code", posts=[post1, post2]) | ||||||
|         obj_id = Blog.objects.insert(blog1, load_bulk=False) |         obj_id = Blog.objects.insert(blog1, load_bulk=False) | ||||||
|         self.assertEquals(obj_id.__class__.__name__, 'ObjectId') |         self.assertEqual(obj_id.__class__.__name__, 'ObjectId') | ||||||
|  |  | ||||||
|         Blog.drop_collection() |         Blog.drop_collection() | ||||||
|         post3 = Post(comments=[comment1, comment1]) |         post3 = Post(comments=[comment1, comment1]) | ||||||
| @@ -573,12 +626,70 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         def throw_operation_error_not_unique(): |         def throw_operation_error_not_unique(): | ||||||
|             Blog.objects.insert([blog2, blog3], safe=True) |             Blog.objects.insert([blog2, blog3], safe=True) | ||||||
|  |  | ||||||
|         self.assertRaises(OperationError, throw_operation_error_not_unique) |         self.assertRaises(NotUniqueError, throw_operation_error_not_unique) | ||||||
|         self.assertEqual(Blog.objects.count(), 2) |         self.assertEqual(Blog.objects.count(), 2) | ||||||
|  |  | ||||||
|         Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True}) |         Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True}) | ||||||
|         self.assertEqual(Blog.objects.count(), 3) |         self.assertEqual(Blog.objects.count(), 3) | ||||||
|  |  | ||||||
|  |     def test_get_changed_fields_query_count(self): | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             owns = ListField(ReferenceField('Organization')) | ||||||
|  |             projects = ListField(ReferenceField('Project')) | ||||||
|  |  | ||||||
|  |         class Organization(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             owner = ReferenceField('Person') | ||||||
|  |             employees = ListField(ReferenceField('Person')) | ||||||
|  |  | ||||||
|  |         class Project(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |         Organization.drop_collection() | ||||||
|  |         Project.drop_collection() | ||||||
|  |  | ||||||
|  |         r1 = Project(name="r1").save() | ||||||
|  |         r2 = Project(name="r2").save() | ||||||
|  |         r3 = Project(name="r3").save() | ||||||
|  |         p1 = Person(name="p1", projects=[r1, r2]).save() | ||||||
|  |         p2 = Person(name="p2", projects=[r2]).save() | ||||||
|  |         o1 = Organization(name="o1", employees=[p1]).save() | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             fresh_o1 = Organization.objects.get(id=o1.id) | ||||||
|  |             self.assertEqual(1, q) | ||||||
|  |             fresh_o1._get_changed_fields() | ||||||
|  |             self.assertEqual(1, q) | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             fresh_o1 = Organization.objects.get(id=o1.id) | ||||||
|  |             fresh_o1.save() | ||||||
|  |  | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             fresh_o1 = Organization.objects.get(id=o1.id) | ||||||
|  |             fresh_o1.save(cascade=False) | ||||||
|  |  | ||||||
|  |             self.assertEqual(q, 2) | ||||||
|  |  | ||||||
|  |         with query_counter() as q: | ||||||
|  |             self.assertEqual(q, 0) | ||||||
|  |  | ||||||
|  |             fresh_o1 = Organization.objects.get(id=o1.id) | ||||||
|  |             fresh_o1.employees.append(p2) | ||||||
|  |             fresh_o1.save(cascade=False) | ||||||
|  |  | ||||||
|  |             self.assertEqual(q, 3) | ||||||
|  |  | ||||||
|     def test_slave_okay(self): |     def test_slave_okay(self): | ||||||
|         """Ensures that a query can take slave_okay syntax |         """Ensures that a query can take slave_okay syntax | ||||||
| @@ -652,20 +763,20 @@ class QuerySetTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         docs = Doc.objects.order_by('number') |         docs = Doc.objects.order_by('number') | ||||||
|  |  | ||||||
|         self.assertEquals(docs.count(), 1000) |         self.assertEqual(docs.count(), 1000) | ||||||
|         self.assertEquals(len(docs), 1000) |         self.assertEqual(len(docs), 1000) | ||||||
|  |  | ||||||
|         docs_string = "%s" % docs |         docs_string = "%s" % docs | ||||||
|         self.assertTrue("Doc: 0" in docs_string) |         self.assertTrue("Doc: 0" in docs_string) | ||||||
|  |  | ||||||
|         self.assertEquals(docs.count(), 1000) |         self.assertEqual(docs.count(), 1000) | ||||||
|         self.assertEquals(len(docs), 1000) |         self.assertEqual(len(docs), 1000) | ||||||
|  |  | ||||||
|         # Limit and skip |         # Limit and skip | ||||||
|         self.assertEquals('[<Doc: 1>, <Doc: 2>, <Doc: 3>]', "%s" % docs[1:4]) |         self.assertEqual('[<Doc: 1>, <Doc: 2>, <Doc: 3>]', "%s" % docs[1:4]) | ||||||
|  |  | ||||||
|         self.assertEquals(docs.count(), 3) |         self.assertEqual(docs.count(), 3) | ||||||
|         self.assertEquals(len(docs), 3) |         self.assertEqual(len(docs), 3) | ||||||
|         for doc in docs: |         for doc in docs: | ||||||
|             self.assertEqual('.. queryset mid-iteration ..', repr(docs)) |             self.assertEqual('.. queryset mid-iteration ..', repr(docs)) | ||||||
|  |  | ||||||
| @@ -769,7 +880,11 @@ class QuerySetTest(unittest.TestCase): | |||||||
|     def test_filter_chaining(self): |     def test_filter_chaining(self): | ||||||
|         """Ensure filters can be chained together. |         """Ensure filters can be chained together. | ||||||
|         """ |         """ | ||||||
|  |         class Blog(Document): | ||||||
|  |             id = StringField(unique=True, primary_key=True) | ||||||
|  |  | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
|  |             blog = ReferenceField(Blog) | ||||||
|             title = StringField() |             title = StringField() | ||||||
|             is_published = BooleanField() |             is_published = BooleanField() | ||||||
|             published_date = DateTimeField() |             published_date = DateTimeField() | ||||||
| @@ -778,13 +893,24 @@ class QuerySetTest(unittest.TestCase): | |||||||
|             def published(doc_cls, queryset): |             def published(doc_cls, queryset): | ||||||
|                 return queryset(is_published=True) |                 return queryset(is_published=True) | ||||||
|  |  | ||||||
|         blog_post_1 = BlogPost(title="Blog Post #1", |         Blog.drop_collection() | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |         blog_1 = Blog(id="1") | ||||||
|  |         blog_2 = Blog(id="2") | ||||||
|  |         blog_3 = Blog(id="3") | ||||||
|  |  | ||||||
|  |         blog_1.save() | ||||||
|  |         blog_2.save() | ||||||
|  |         blog_3.save() | ||||||
|  |  | ||||||
|  |         blog_post_1 = BlogPost(blog=blog_1, title="Blog Post #1", | ||||||
|                                is_published = True, |                                is_published = True, | ||||||
|                                published_date=datetime(2010, 1, 5, 0, 0 ,0)) |                                published_date=datetime(2010, 1, 5, 0, 0 ,0)) | ||||||
|         blog_post_2 = BlogPost(title="Blog Post #2", |         blog_post_2 = BlogPost(blog=blog_2, title="Blog Post #2", | ||||||
|                                is_published = True, |                                is_published = True, | ||||||
|                                published_date=datetime(2010, 1, 6, 0, 0 ,0)) |                                published_date=datetime(2010, 1, 6, 0, 0 ,0)) | ||||||
|         blog_post_3 = BlogPost(title="Blog Post #3", |         blog_post_3 = BlogPost(blog=blog_3, title="Blog Post #3", | ||||||
|                                is_published = True, |                                is_published = True, | ||||||
|                                published_date=datetime(2010, 1, 7, 0, 0 ,0)) |                                published_date=datetime(2010, 1, 7, 0, 0 ,0)) | ||||||
|  |  | ||||||
| @@ -798,7 +924,29 @@ class QuerySetTest(unittest.TestCase): | |||||||
|             published_date__lt=datetime(2010, 1, 7, 0, 0 ,0)) |             published_date__lt=datetime(2010, 1, 7, 0, 0 ,0)) | ||||||
|         self.assertEqual(published_posts.count(), 2) |         self.assertEqual(published_posts.count(), 2) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |         blog_posts = BlogPost.objects | ||||||
|  |         blog_posts = blog_posts.filter(blog__in=[blog_1, blog_2]) | ||||||
|  |         blog_posts = blog_posts.filter(blog=blog_3) | ||||||
|  |         self.assertEqual(blog_posts.count(), 0) | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |         Blog.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_raw_and_merging(self): | ||||||
|  |         class Doc(Document): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         raw_query = Doc.objects(__raw__={'deleted': False, | ||||||
|  |                                 'scraped': 'yes', | ||||||
|  |                                 '$nor': [{'views.extracted': 'no'}, | ||||||
|  |                                          {'attachments.views.extracted':'no'}] | ||||||
|  |                                 })._query | ||||||
|  |  | ||||||
|  |         expected = {'deleted': False, '_types': 'Doc', 'scraped': 'yes', | ||||||
|  |                     '$nor': [{'views.extracted': 'no'}, | ||||||
|  |                              {'attachments.views.extracted': 'no'}]} | ||||||
|  |         self.assertEqual(expected, raw_query) | ||||||
|  |  | ||||||
|     def test_ordering(self): |     def test_ordering(self): | ||||||
|         """Ensure default ordering is applied and can be overridden. |         """Ensure default ordering is applied and can be overridden. | ||||||
| @@ -1024,27 +1172,27 @@ class QuerySetTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         # first three |         # first three | ||||||
|         numbers = Numbers.objects.fields(slice__n=3).get() |         numbers = Numbers.objects.fields(slice__n=3).get() | ||||||
|         self.assertEquals(numbers.n, [0, 1, 2]) |         self.assertEqual(numbers.n, [0, 1, 2]) | ||||||
|  |  | ||||||
|         # last three |         # last three | ||||||
|         numbers = Numbers.objects.fields(slice__n=-3).get() |         numbers = Numbers.objects.fields(slice__n=-3).get() | ||||||
|         self.assertEquals(numbers.n, [-3, -2, -1]) |         self.assertEqual(numbers.n, [-3, -2, -1]) | ||||||
|  |  | ||||||
|         # skip 2, limit 3 |         # skip 2, limit 3 | ||||||
|         numbers = Numbers.objects.fields(slice__n=[2, 3]).get() |         numbers = Numbers.objects.fields(slice__n=[2, 3]).get() | ||||||
|         self.assertEquals(numbers.n, [2, 3, 4]) |         self.assertEqual(numbers.n, [2, 3, 4]) | ||||||
|  |  | ||||||
|         # skip to fifth from last, limit 4 |         # skip to fifth from last, limit 4 | ||||||
|         numbers = Numbers.objects.fields(slice__n=[-5, 4]).get() |         numbers = Numbers.objects.fields(slice__n=[-5, 4]).get() | ||||||
|         self.assertEquals(numbers.n, [-5, -4, -3, -2]) |         self.assertEqual(numbers.n, [-5, -4, -3, -2]) | ||||||
|  |  | ||||||
|         # skip to fifth from last, limit 10 |         # skip to fifth from last, limit 10 | ||||||
|         numbers = Numbers.objects.fields(slice__n=[-5, 10]).get() |         numbers = Numbers.objects.fields(slice__n=[-5, 10]).get() | ||||||
|         self.assertEquals(numbers.n, [-5, -4, -3, -2, -1]) |         self.assertEqual(numbers.n, [-5, -4, -3, -2, -1]) | ||||||
|  |  | ||||||
|         # skip to fifth from last, limit 10 dict method |         # skip to fifth from last, limit 10 dict method | ||||||
|         numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get() |         numbers = Numbers.objects.fields(n={"$slice": [-5, 10]}).get() | ||||||
|         self.assertEquals(numbers.n, [-5, -4, -3, -2, -1]) |         self.assertEqual(numbers.n, [-5, -4, -3, -2, -1]) | ||||||
|  |  | ||||||
|     def test_slicing_nested_fields(self): |     def test_slicing_nested_fields(self): | ||||||
|         """Ensure that query slicing an embedded array works. |         """Ensure that query slicing an embedded array works. | ||||||
| @@ -1064,27 +1212,27 @@ class QuerySetTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         # first three |         # first three | ||||||
|         numbers = Numbers.objects.fields(slice__embedded__n=3).get() |         numbers = Numbers.objects.fields(slice__embedded__n=3).get() | ||||||
|         self.assertEquals(numbers.embedded.n, [0, 1, 2]) |         self.assertEqual(numbers.embedded.n, [0, 1, 2]) | ||||||
|  |  | ||||||
|         # last three |         # last three | ||||||
|         numbers = Numbers.objects.fields(slice__embedded__n=-3).get() |         numbers = Numbers.objects.fields(slice__embedded__n=-3).get() | ||||||
|         self.assertEquals(numbers.embedded.n, [-3, -2, -1]) |         self.assertEqual(numbers.embedded.n, [-3, -2, -1]) | ||||||
|  |  | ||||||
|         # skip 2, limit 3 |         # skip 2, limit 3 | ||||||
|         numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get() |         numbers = Numbers.objects.fields(slice__embedded__n=[2, 3]).get() | ||||||
|         self.assertEquals(numbers.embedded.n, [2, 3, 4]) |         self.assertEqual(numbers.embedded.n, [2, 3, 4]) | ||||||
|  |  | ||||||
|         # skip to fifth from last, limit 4 |         # skip to fifth from last, limit 4 | ||||||
|         numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get() |         numbers = Numbers.objects.fields(slice__embedded__n=[-5, 4]).get() | ||||||
|         self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2]) |         self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2]) | ||||||
|  |  | ||||||
|         # skip to fifth from last, limit 10 |         # skip to fifth from last, limit 10 | ||||||
|         numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get() |         numbers = Numbers.objects.fields(slice__embedded__n=[-5, 10]).get() | ||||||
|         self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2, -1]) |         self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) | ||||||
|  |  | ||||||
|         # skip to fifth from last, limit 10 dict method |         # skip to fifth from last, limit 10 dict method | ||||||
|         numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get() |         numbers = Numbers.objects.fields(embedded__n={"$slice": [-5, 10]}).get() | ||||||
|         self.assertEquals(numbers.embedded.n, [-5, -4, -3, -2, -1]) |         self.assertEqual(numbers.embedded.n, [-5, -4, -3, -2, -1]) | ||||||
|  |  | ||||||
|     def test_find_embedded(self): |     def test_find_embedded(self): | ||||||
|         """Ensure that an embedded document is properly returned from a query. |         """Ensure that an embedded document is properly returned from a query. | ||||||
| @@ -1168,7 +1316,6 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         published_posts = (post1, post2, post3, post5, post6) |         published_posts = (post1, post2, post3, post5, post6) | ||||||
|         self.assertTrue(all(obj.id in posts for obj in published_posts)) |         self.assertTrue(all(obj.id in posts for obj in published_posts)) | ||||||
|  |  | ||||||
|  |  | ||||||
|         # Check Q object combination |         # Check Q object combination | ||||||
|         date = datetime(2010, 1, 10) |         date = datetime(2010, 1, 10) | ||||||
|         q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True)) |         q = BlogPost.objects(Q(publish_date__lte=date) | Q(published=True)) | ||||||
| @@ -1227,6 +1374,42 @@ class QuerySetTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_raw_query_and_Q_objects(self): | ||||||
|  |         """ | ||||||
|  |         Test raw plays nicely | ||||||
|  |         """ | ||||||
|  |         class Foo(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             a = StringField() | ||||||
|  |             b = StringField() | ||||||
|  |             c = StringField() | ||||||
|  |  | ||||||
|  |             meta = { | ||||||
|  |                 'allow_inheritance': False | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |         query = Foo.objects(__raw__={'$nor': [{'name': 'bar'}]})._query | ||||||
|  |         self.assertEqual(query, {'$nor': [{'name': 'bar'}]}) | ||||||
|  |  | ||||||
|  |         q1 = {'$or': [{'a': 1}, {'b': 1}]} | ||||||
|  |         query = Foo.objects(Q(__raw__=q1) & Q(c=1))._query | ||||||
|  |         self.assertEqual(query, {'$or': [{'a': 1}, {'b': 1}], 'c': 1}) | ||||||
|  |  | ||||||
|  |     def test_q_merge_queries_edge_case(self): | ||||||
|  |  | ||||||
|  |         class User(Document): | ||||||
|  |             email = EmailField(required=False) | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         User.drop_collection() | ||||||
|  |         pk = ObjectId() | ||||||
|  |         User(email='example@example.com', pk=pk).save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(1, User.objects.filter( | ||||||
|  |                                 Q(email='example@example.com') | | ||||||
|  |                                 Q(name='John Doe') | ||||||
|  |                                 ).limit(2).filter(pk=pk).count()) | ||||||
|  |  | ||||||
|     def test_exec_js_query(self): |     def test_exec_js_query(self): | ||||||
|         """Ensure that queries are properly formed for use in exec_js. |         """Ensure that queries are properly formed for use in exec_js. | ||||||
|         """ |         """ | ||||||
| @@ -1325,7 +1508,7 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         # Test template style |         # Test template style | ||||||
|         code = "{{~comments.content}}" |         code = "{{~comments.content}}" | ||||||
|         sub_code = BlogPost.objects._sub_js_fields(code) |         sub_code = BlogPost.objects._sub_js_fields(code) | ||||||
|         self.assertEquals("cmnts.body", sub_code) |         self.assertEqual("cmnts.body", sub_code) | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
| @@ -1366,12 +1549,15 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         self.assertEqual(1, BlogPost.objects.count()) |         self.assertEqual(1, BlogPost.objects.count()) | ||||||
|  |  | ||||||
|     def test_reverse_delete_rule_cascade_self_referencing(self): |     def test_reverse_delete_rule_cascade_self_referencing(self): | ||||||
|         """Ensure self-referencing CASCADE deletes do not result in infinite loop |         """Ensure self-referencing CASCADE deletes do not result in infinite | ||||||
|  |         loop | ||||||
|         """ |         """ | ||||||
|         class Category(Document): |         class Category(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             parent = ReferenceField('self', reverse_delete_rule=CASCADE) |             parent = ReferenceField('self', reverse_delete_rule=CASCADE) | ||||||
|  |  | ||||||
|  |         Category.drop_collection() | ||||||
|  |  | ||||||
|         num_children = 3 |         num_children = 3 | ||||||
|         base = Category(name='Root') |         base = Category(name='Root') | ||||||
|         base.save() |         base.save() | ||||||
| @@ -1388,13 +1574,13 @@ class QuerySetTest(unittest.TestCase): | |||||||
|                 child_child.save() |                 child_child.save() | ||||||
|  |  | ||||||
|         tree_size = 1 + num_children + (num_children * num_children) |         tree_size = 1 + num_children + (num_children * num_children) | ||||||
|         self.assertEquals(tree_size, Category.objects.count()) |         self.assertEqual(tree_size, Category.objects.count()) | ||||||
|         self.assertEquals(num_children, Category.objects(parent=base).count()) |         self.assertEqual(num_children, Category.objects(parent=base).count()) | ||||||
|  |  | ||||||
|         # The delete should effectively wipe out the Category collection |         # The delete should effectively wipe out the Category collection | ||||||
|         # without resulting in infinite parent-child cascade recursion |         # without resulting in infinite parent-child cascade recursion | ||||||
|         base.delete() |         base.delete() | ||||||
|         self.assertEquals(0, Category.objects.count()) |         self.assertEqual(0, Category.objects.count()) | ||||||
|  |  | ||||||
|     def test_reverse_delete_rule_nullify(self): |     def test_reverse_delete_rule_nullify(self): | ||||||
|         """Ensure nullification of references to deleted documents. |         """Ensure nullification of references to deleted documents. | ||||||
| @@ -1470,6 +1656,40 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         self.assertEqual(post.authors, [me]) |         self.assertEqual(post.authors, [me]) | ||||||
|         self.assertEqual(another.authors, []) |         self.assertEqual(another.authors, []) | ||||||
|  |  | ||||||
|  |     def test_delete_with_limits(self): | ||||||
|  |  | ||||||
|  |         class Log(Document): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         Log.drop_collection() | ||||||
|  |  | ||||||
|  |         for i in xrange(10): | ||||||
|  |             Log().save() | ||||||
|  |  | ||||||
|  |         Log.objects()[3:5].delete() | ||||||
|  |         self.assertEqual(8, Log.objects.count()) | ||||||
|  |  | ||||||
|  |     def test_delete_with_limit_handles_delete_rules(self): | ||||||
|  |         """Ensure cascading deletion of referring documents from the database. | ||||||
|  |         """ | ||||||
|  |         class BlogPost(Document): | ||||||
|  |             content = StringField() | ||||||
|  |             author = ReferenceField(self.Person, reverse_delete_rule=CASCADE) | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |         me = self.Person(name='Test User') | ||||||
|  |         me.save() | ||||||
|  |         someoneelse = self.Person(name='Some-one Else') | ||||||
|  |         someoneelse.save() | ||||||
|  |  | ||||||
|  |         BlogPost(content='Watching TV', author=me).save() | ||||||
|  |         BlogPost(content='Chilling out', author=me).save() | ||||||
|  |         BlogPost(content='Pro Testing', author=someoneelse).save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(3, BlogPost.objects.count()) | ||||||
|  |         self.Person.objects()[:1].delete() | ||||||
|  |         self.assertEqual(1, BlogPost.objects.count()) | ||||||
|  |  | ||||||
|     def test_update(self): |     def test_update(self): | ||||||
|         """Ensure that atomic updates work properly. |         """Ensure that atomic updates work properly. | ||||||
|         """ |         """ | ||||||
| @@ -1520,7 +1740,7 @@ class QuerySetTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|     def test_update_push_and_pull(self): |     def test_update_push_and_pull_add_to_set(self): | ||||||
|         """Ensure that the 'pull' update operation works correctly. |         """Ensure that the 'pull' update operation works correctly. | ||||||
|         """ |         """ | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
| @@ -1553,6 +1773,23 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         post.reload() |         post.reload() | ||||||
|         self.assertEqual(post.tags, ["code", "mongodb"]) |         self.assertEqual(post.tags, ["code", "mongodb"]) | ||||||
|  |  | ||||||
|  |     def test_add_to_set_each(self): | ||||||
|  |         class Item(Document): | ||||||
|  |             name = StringField(required=True) | ||||||
|  |             description = StringField(max_length=50) | ||||||
|  |             parents = ListField(ReferenceField('self')) | ||||||
|  |  | ||||||
|  |         Item.drop_collection() | ||||||
|  |  | ||||||
|  |         item = Item(name='test item').save() | ||||||
|  |         parent_1 = Item(name='parent 1').save() | ||||||
|  |         parent_2 = Item(name='parent 2').save() | ||||||
|  |  | ||||||
|  |         item.update(add_to_set__parents=[parent_1, parent_2, parent_1]) | ||||||
|  |         item.reload() | ||||||
|  |  | ||||||
|  |         self.assertEqual([parent_1, parent_2], item.parents) | ||||||
|  |  | ||||||
|     def test_pull_nested(self): |     def test_pull_nested(self): | ||||||
|  |  | ||||||
|         class User(Document): |         class User(Document): | ||||||
| @@ -1638,7 +1875,7 @@ class QuerySetTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         BlogPost.objects(slug="test-2").update_one(set__tags__0__name="python") |         BlogPost.objects(slug="test-2").update_one(set__tags__0__name="python") | ||||||
|         post.reload() |         post.reload() | ||||||
|         self.assertEquals(post.tags[0].name, 'python') |         self.assertEqual(post.tags[0].name, 'python') | ||||||
|  |  | ||||||
|         BlogPost.objects(slug="test-2").update_one(pop__tags=-1) |         BlogPost.objects(slug="test-2").update_one(pop__tags=-1) | ||||||
|         post.reload() |         post.reload() | ||||||
| @@ -1665,7 +1902,7 @@ class QuerySetTest(unittest.TestCase): | |||||||
|             set__authors__S=Author(name="Ross")) |             set__authors__S=Author(name="Ross")) | ||||||
|  |  | ||||||
|         message = message.reload() |         message = message.reload() | ||||||
|         self.assertEquals(message.authors[0].name, "Ross") |         self.assertEqual(message.authors[0].name, "Ross") | ||||||
|  |  | ||||||
|         Message.objects(authors__name="Ross").update_one( |         Message.objects(authors__name="Ross").update_one( | ||||||
|             set__authors=[Author(name="Harry"), |             set__authors=[Author(name="Harry"), | ||||||
| @@ -1673,9 +1910,9 @@ class QuerySetTest(unittest.TestCase): | |||||||
|                           Author(name="Adam")]) |                           Author(name="Adam")]) | ||||||
|  |  | ||||||
|         message = message.reload() |         message = message.reload() | ||||||
|         self.assertEquals(message.authors[0].name, "Harry") |         self.assertEqual(message.authors[0].name, "Harry") | ||||||
|         self.assertEquals(message.authors[1].name, "Ross") |         self.assertEqual(message.authors[1].name, "Ross") | ||||||
|         self.assertEquals(message.authors[2].name, "Adam") |         self.assertEqual(message.authors[2].name, "Adam") | ||||||
|  |  | ||||||
|     def test_order_by(self): |     def test_order_by(self): | ||||||
|         """Ensure that QuerySets may be ordered. |         """Ensure that QuerySets may be ordered. | ||||||
| @@ -1755,10 +1992,10 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         results = list(results) |         results = list(results) | ||||||
|         self.assertEqual(len(results), 4) |         self.assertEqual(len(results), 4) | ||||||
|  |  | ||||||
|         music = filter(lambda r: r.key == "music", results)[0] |         music = list(filter(lambda r: r.key == "music", results))[0] | ||||||
|         self.assertEqual(music.value, 2) |         self.assertEqual(music.value, 2) | ||||||
|  |  | ||||||
|         film = filter(lambda r: r.key == "film", results)[0] |         film = list(filter(lambda r: r.key == "film", results))[0] | ||||||
|         self.assertEqual(film.value, 3) |         self.assertEqual(film.value, 3) | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
| @@ -1977,9 +2214,9 @@ class QuerySetTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         # Check item_frequencies works for non-list fields |         # Check item_frequencies works for non-list fields | ||||||
|         def test_assertions(f): |         def test_assertions(f): | ||||||
|             self.assertEqual(set(['1', '2']), set(f.keys())) |             self.assertEqual(set([1, 2]), set(f.keys())) | ||||||
|             self.assertEqual(f['1'], 1) |             self.assertEqual(f[1], 1) | ||||||
|             self.assertEqual(f['2'], 2) |             self.assertEqual(f[2], 2) | ||||||
|  |  | ||||||
|         exec_js = BlogPost.objects.item_frequencies('hits') |         exec_js = BlogPost.objects.item_frequencies('hits') | ||||||
|         map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True) |         map_reduce = BlogPost.objects.item_frequencies('hits', map_reduce=True) | ||||||
| @@ -2058,15 +2295,15 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         Person(name="Wilson Jr").save() |         Person(name="Wilson Jr").save() | ||||||
|  |  | ||||||
|         freq = Person.objects.item_frequencies('city') |         freq = Person.objects.item_frequencies('city') | ||||||
|         self.assertEquals(freq, {'CRB': 1.0, None: 1.0}) |         self.assertEqual(freq, {'CRB': 1.0, None: 1.0}) | ||||||
|         freq = Person.objects.item_frequencies('city', normalize=True) |         freq = Person.objects.item_frequencies('city', normalize=True) | ||||||
|         self.assertEquals(freq, {'CRB': 0.5, None: 0.5}) |         self.assertEqual(freq, {'CRB': 0.5, None: 0.5}) | ||||||
|  |  | ||||||
|  |  | ||||||
|         freq = Person.objects.item_frequencies('city', map_reduce=True) |         freq = Person.objects.item_frequencies('city', map_reduce=True) | ||||||
|         self.assertEquals(freq, {'CRB': 1.0, None: 1.0}) |         self.assertEqual(freq, {'CRB': 1.0, None: 1.0}) | ||||||
|         freq = Person.objects.item_frequencies('city', normalize=True, map_reduce=True) |         freq = Person.objects.item_frequencies('city', normalize=True, map_reduce=True) | ||||||
|         self.assertEquals(freq, {'CRB': 0.5, None: 0.5}) |         self.assertEqual(freq, {'CRB': 0.5, None: 0.5}) | ||||||
|  |  | ||||||
|     def test_item_frequencies_with_null_embedded(self): |     def test_item_frequencies_with_null_embedded(self): | ||||||
|         class Data(EmbeddedDocument): |         class Data(EmbeddedDocument): | ||||||
| @@ -2079,7 +2316,6 @@ class QuerySetTest(unittest.TestCase): | |||||||
|             data = EmbeddedDocumentField(Data, required=True) |             data = EmbeddedDocumentField(Data, required=True) | ||||||
|             extra = EmbeddedDocumentField(Extra) |             extra = EmbeddedDocumentField(Extra) | ||||||
|  |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |         Person.drop_collection() | ||||||
|  |  | ||||||
|         p = Person() |         p = Person() | ||||||
| @@ -2092,10 +2328,56 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         p.save() |         p.save() | ||||||
|  |  | ||||||
|         ot = Person.objects.item_frequencies('extra.tag', map_reduce=False) |         ot = Person.objects.item_frequencies('extra.tag', map_reduce=False) | ||||||
|         self.assertEquals(ot, {None: 1.0, u'friend': 1.0}) |         self.assertEqual(ot, {None: 1.0, u'friend': 1.0}) | ||||||
|  |  | ||||||
|         ot = Person.objects.item_frequencies('extra.tag', map_reduce=True) |         ot = Person.objects.item_frequencies('extra.tag', map_reduce=True) | ||||||
|         self.assertEquals(ot, {None: 1.0, u'friend': 1.0}) |         self.assertEqual(ot, {None: 1.0, u'friend': 1.0}) | ||||||
|  |  | ||||||
|  |     def test_item_frequencies_with_0_values(self): | ||||||
|  |         class Test(Document): | ||||||
|  |             val = IntField() | ||||||
|  |  | ||||||
|  |         Test.drop_collection() | ||||||
|  |         t = Test() | ||||||
|  |         t.val = 0 | ||||||
|  |         t.save() | ||||||
|  |  | ||||||
|  |         ot = Test.objects.item_frequencies('val', map_reduce=True) | ||||||
|  |         self.assertEqual(ot, {0: 1}) | ||||||
|  |         ot = Test.objects.item_frequencies('val', map_reduce=False) | ||||||
|  |         self.assertEqual(ot, {0: 1}) | ||||||
|  |  | ||||||
|  |     def test_item_frequencies_with_False_values(self): | ||||||
|  |         class Test(Document): | ||||||
|  |             val = BooleanField() | ||||||
|  |  | ||||||
|  |         Test.drop_collection() | ||||||
|  |         t = Test() | ||||||
|  |         t.val = False | ||||||
|  |         t.save() | ||||||
|  |  | ||||||
|  |         ot = Test.objects.item_frequencies('val', map_reduce=True) | ||||||
|  |         self.assertEqual(ot, {False: 1}) | ||||||
|  |         ot = Test.objects.item_frequencies('val', map_reduce=False) | ||||||
|  |         self.assertEqual(ot, {False: 1}) | ||||||
|  |  | ||||||
|  |     def test_item_frequencies_normalize(self): | ||||||
|  |         class Test(Document): | ||||||
|  |             val = IntField() | ||||||
|  |  | ||||||
|  |         Test.drop_collection() | ||||||
|  |  | ||||||
|  |         for i in xrange(50): | ||||||
|  |             Test(val=1).save() | ||||||
|  |  | ||||||
|  |         for i in xrange(20): | ||||||
|  |             Test(val=2).save() | ||||||
|  |  | ||||||
|  |         freqs = Test.objects.item_frequencies('val', map_reduce=False, normalize=True) | ||||||
|  |         self.assertEqual(freqs, {1: 50.0/70, 2: 20.0/70}) | ||||||
|  |  | ||||||
|  |         freqs = Test.objects.item_frequencies('val', map_reduce=True, normalize=True) | ||||||
|  |         self.assertEqual(freqs, {1: 50.0/70, 2: 20.0/70}) | ||||||
|  |  | ||||||
|     def test_average(self): |     def test_average(self): | ||||||
|         """Ensure that field can be averaged correctly. |         """Ensure that field can be averaged correctly. | ||||||
| @@ -2155,7 +2437,29 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         foo = Foo(bar=bar) |         foo = Foo(bar=bar) | ||||||
|         foo.save() |         foo.save() | ||||||
|  |  | ||||||
|         self.assertEquals(Foo.objects.distinct("bar"), [bar]) |         self.assertEqual(Foo.objects.distinct("bar"), [bar]) | ||||||
|  |  | ||||||
|  |     def test_distinct_handles_references_to_alias(self): | ||||||
|  |         register_connection('testdb', 'mongoenginetest2') | ||||||
|  |  | ||||||
|  |         class Foo(Document): | ||||||
|  |             bar = ReferenceField("Bar") | ||||||
|  |             meta = {'db_alias': 'testdb'} | ||||||
|  |  | ||||||
|  |         class Bar(Document): | ||||||
|  |             text = StringField() | ||||||
|  |             meta = {'db_alias': 'testdb'} | ||||||
|  |  | ||||||
|  |         Bar.drop_collection() | ||||||
|  |         Foo.drop_collection() | ||||||
|  |  | ||||||
|  |         bar = Bar(text="hi") | ||||||
|  |         bar.save() | ||||||
|  |  | ||||||
|  |         foo = Foo(bar=bar) | ||||||
|  |         foo.save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(Foo.objects.distinct("bar"), [bar]) | ||||||
|  |  | ||||||
|     def test_custom_manager(self): |     def test_custom_manager(self): | ||||||
|         """Ensure that custom QuerySetManager instances work as expected. |         """Ensure that custom QuerySetManager instances work as expected. | ||||||
| @@ -2166,28 +2470,29 @@ class QuerySetTest(unittest.TestCase): | |||||||
|             date = DateTimeField(default=datetime.now) |             date = DateTimeField(default=datetime.now) | ||||||
|  |  | ||||||
|             @queryset_manager |             @queryset_manager | ||||||
|             def objects(doc_cls, queryset): |             def objects(cls, qryset): | ||||||
|                 return queryset(deleted=False) |                 opts = {"deleted": False} | ||||||
|  |                 return qryset(**opts) | ||||||
|  |  | ||||||
|             @queryset_manager |             @queryset_manager | ||||||
|             def music_posts(doc_cls, queryset): |             def music_posts(doc_cls, queryset, deleted=False): | ||||||
|                 return queryset(tags='music', deleted=False).order_by('-date') |                 return queryset(tags='music', | ||||||
|  |                                 deleted=deleted).order_by('date') | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|         post1 = BlogPost(tags=['music', 'film']) |         post1 = BlogPost(tags=['music', 'film']).save() | ||||||
|         post1.save() |         post2 = BlogPost(tags=['music']).save() | ||||||
|         post2 = BlogPost(tags=['music']) |         post3 = BlogPost(tags=['film', 'actors']).save() | ||||||
|         post2.save() |         post4 = BlogPost(tags=['film', 'actors', 'music'], deleted=True).save() | ||||||
|         post3 = BlogPost(tags=['film', 'actors']) |  | ||||||
|         post3.save() |  | ||||||
|         post4 = BlogPost(tags=['film', 'actors'], deleted=True) |  | ||||||
|         post4.save() |  | ||||||
|  |  | ||||||
|         self.assertEqual([p.id for p in BlogPost.objects], |         self.assertEqual([p.id for p in BlogPost.objects()], | ||||||
|                          [post1.id, post2.id, post3.id]) |                          [post1.id, post2.id, post3.id]) | ||||||
|         self.assertEqual([p.id for p in BlogPost.music_posts], |         self.assertEqual([p.id for p in BlogPost.music_posts()], | ||||||
|                          [post2.id, post1.id]) |                          [post1.id, post2.id]) | ||||||
|  |  | ||||||
|  |         self.assertEqual([p.id for p in BlogPost.music_posts(True)], | ||||||
|  |                          [post4.id]) | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
| @@ -2312,30 +2617,48 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         """Ensure that index_types will, when disabled, prevent _types |         """Ensure that index_types will, when disabled, prevent _types | ||||||
|         being added to all indices. |         being added to all indices. | ||||||
|         """ |         """ | ||||||
|         class BlogPost(Document): |         class BloggPost(Document): | ||||||
|             date = DateTimeField() |             date = DateTimeField() | ||||||
|             meta = {'index_types': False, |             meta = {'index_types': False, | ||||||
|                     'indexes': ['-date']} |                     'indexes': ['-date']} | ||||||
|  |  | ||||||
|         # Indexes are lazy so use list() to perform query |         # Indexes are lazy so use list() to perform query | ||||||
|         list(BlogPost.objects) |         list(BloggPost.objects) | ||||||
|         info = BlogPost.objects._collection.index_information() |         info = BloggPost.objects._collection.index_information() | ||||||
|         info = [value['key'] for key, value in info.iteritems()] |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|         self.assertTrue([('_types', 1)] not in info) |         self.assertTrue([('_types', 1)] not in info) | ||||||
|         self.assertTrue([('date', -1)] in info) |         self.assertTrue([('date', -1)] in info) | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BloggPost.drop_collection() | ||||||
|  |  | ||||||
|         class BlogPost(Document): |         class BloggPost(Document): | ||||||
|             title = StringField() |             title = StringField() | ||||||
|             meta = {'allow_inheritance': False} |             meta = {'allow_inheritance': False} | ||||||
|  |  | ||||||
|         # _types is not used on objects where allow_inheritance is False |         # _types is not used on objects where allow_inheritance is False | ||||||
|         list(BlogPost.objects) |         list(BloggPost.objects) | ||||||
|         info = BlogPost.objects._collection.index_information() |         info = BloggPost.objects._collection.index_information() | ||||||
|         self.assertFalse([('_types', 1)] in info.values()) |         self.assertFalse([('_types', 1)] in info.values()) | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |         BloggPost.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_types_index_with_pk(self): | ||||||
|  |  | ||||||
|  |         class Comment(EmbeddedDocument): | ||||||
|  |             comment_id = IntField(required=True) | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             class BlogPost(Document): | ||||||
|  |                 comments = EmbeddedDocumentField(Comment) | ||||||
|  |                 meta = {'indexes': [{'fields': ['pk', 'comments.comment_id'], | ||||||
|  |                     'unique': True}]} | ||||||
|  |         except UnboundLocalError: | ||||||
|  |             self.fail('Unbound local error at types index + pk definition') | ||||||
|  |  | ||||||
|  |         info = BlogPost.objects._collection.index_information() | ||||||
|  |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|  |         index_item = [(u'_types', 1), (u'_id', 1), (u'comments.comment_id', 1)] | ||||||
|  |         self.assertTrue(index_item in info) | ||||||
|  |  | ||||||
|     def test_dict_with_custom_baseclass(self): |     def test_dict_with_custom_baseclass(self): | ||||||
|         """Ensure DictField working with custom base clases. |         """Ensure DictField working with custom base clases. | ||||||
| @@ -2614,8 +2937,8 @@ class QuerySetTest(unittest.TestCase): | |||||||
|  |  | ||||||
|         Post().save() |         Post().save() | ||||||
|         Post(is_published=True).save() |         Post(is_published=True).save() | ||||||
|         self.assertEquals(Post.objects.count(), 2) |         self.assertEqual(Post.objects.count(), 2) | ||||||
|         self.assertEquals(Post.published.count(), 1) |         self.assertEqual(Post.published.count(), 1) | ||||||
|  |  | ||||||
|         Post.drop_collection() |         Post.drop_collection() | ||||||
|  |  | ||||||
| @@ -2781,10 +3104,10 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         Number(n=3).save() |         Number(n=3).save() | ||||||
|  |  | ||||||
|         numbers = [n.n for n in Number.objects.order_by('-n')] |         numbers = [n.n for n in Number.objects.order_by('-n')] | ||||||
|         self.assertEquals([3, 2, 1], numbers) |         self.assertEqual([3, 2, 1], numbers) | ||||||
|  |  | ||||||
|         numbers = [n.n for n in Number.objects.order_by('+n')] |         numbers = [n.n for n in Number.objects.order_by('+n')] | ||||||
|         self.assertEquals([1, 2, 3], numbers) |         self.assertEqual([1, 2, 3], numbers) | ||||||
|         Number.drop_collection() |         Number.drop_collection() | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -3106,15 +3429,22 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         self.assertEqual(len(self.Person.objects.scalar('name')), 55) |         self.assertEqual(len(self.Person.objects.scalar('name')), 55) | ||||||
|         self.assertEqual("A0", "%s" % self.Person.objects.order_by('name').scalar('name').first()) |         self.assertEqual("A0", "%s" % self.Person.objects.order_by('name').scalar('name').first()) | ||||||
|         self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0]) |         self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0]) | ||||||
|         self.assertEqual("[u'A1', u'A2']",  "%s" % self.Person.objects.order_by('age').scalar('name')[1:3]) |         if PY3: | ||||||
|         self.assertEqual("[u'A51', u'A52']",  "%s" % self.Person.objects.order_by('age').scalar('name')[51:53]) |             self.assertEqual("['A1', 'A2']",  "%s" % self.Person.objects.order_by('age').scalar('name')[1:3]) | ||||||
|  |             self.assertEqual("['A51', 'A52']",  "%s" % self.Person.objects.order_by('age').scalar('name')[51:53]) | ||||||
|  |         else: | ||||||
|  |             self.assertEqual("[u'A1', u'A2']",  "%s" % self.Person.objects.order_by('age').scalar('name')[1:3]) | ||||||
|  |             self.assertEqual("[u'A51', u'A52']",  "%s" % self.Person.objects.order_by('age').scalar('name')[51:53]) | ||||||
|  |  | ||||||
|         # with_id and in_bulk |         # with_id and in_bulk | ||||||
|         person = self.Person.objects.order_by('name').first() |         person = self.Person.objects.order_by('name').first() | ||||||
|         self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').with_id(person.id)) |         self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').with_id(person.id)) | ||||||
|  |  | ||||||
|         pks = self.Person.objects.order_by('age').scalar('pk')[1:3] |         pks = self.Person.objects.order_by('age').scalar('pk')[1:3] | ||||||
|         self.assertEqual("[u'A1', u'A2']",  "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values())) |         if PY3: | ||||||
|  |             self.assertEqual("['A1', 'A2']",  "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values())) | ||||||
|  |         else: | ||||||
|  |             self.assertEqual("[u'A1', u'A2']",  "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values())) | ||||||
|  |  | ||||||
|  |  | ||||||
| class QTest(unittest.TestCase): | class QTest(unittest.TestCase): | ||||||
|   | |||||||
| @@ -1,4 +1,5 @@ | |||||||
| import unittest | import unittest | ||||||
|  |  | ||||||
| import pymongo | import pymongo | ||||||
| from pymongo import ReadPreference, ReplicaSetConnection | from pymongo import ReadPreference, ReplicaSetConnection | ||||||
|  |  | ||||||
| @@ -26,7 +27,7 @@ class ConnectionTest(unittest.TestCase): | |||||||
|         if not isinstance(conn, ReplicaSetConnection): |         if not isinstance(conn, ReplicaSetConnection): | ||||||
|             return |             return | ||||||
|  |  | ||||||
|         self.assertEquals(conn.read_preference, ReadPreference.SECONDARY_ONLY) |         self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_ONLY) | ||||||
|  |  | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
|   | |||||||
| @@ -212,9 +212,9 @@ class SignalTests(unittest.TestCase): | |||||||
|  |  | ||||||
|         # The output of this signal is not entirely deterministic. The reloaded |         # The output of this signal is not entirely deterministic. The reloaded | ||||||
|         # object will have an object ID. Hence, we only check part of the output |         # object will have an object ID. Hence, we only check part of the output | ||||||
|         self.assertEquals(signal_output[3], |         self.assertEqual(signal_output[3], | ||||||
|             "pre_bulk_insert signal, [<Author: Bill Shakespeare>]") |             "pre_bulk_insert signal, [<Author: Bill Shakespeare>]") | ||||||
|         self.assertEquals(signal_output[-2:], |         self.assertEqual(signal_output[-2:], | ||||||
|             ["post_bulk_insert signal, [<Author: Bill Shakespeare>]", |             ["post_bulk_insert signal, [<Author: Bill Shakespeare>]", | ||||||
|              "Is loaded",]) |              "Is loaded",]) | ||||||
|  |  | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user