Compare commits
	
		
			351 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | efeaba39a4 | ||
|  | 1a97dfd479 | ||
|  | 9fecf2b303 | ||
|  | 3d0d2f48ad | ||
|  | 581605e0e2 | ||
|  | 45d3a7f6ff | ||
|  | 7ca2ea0766 | ||
|  | 89220c142b | ||
|  | c73ce3d220 | ||
|  | b0f127af4e | ||
|  | 766d54795f | ||
|  | bd41c6eea4 | ||
|  | 2435786713 | ||
|  | 9e7ea64bd2 | ||
|  | 89a6eee6af | ||
|  | 2ec1476e50 | ||
|  | 2d9b581f34 | ||
|  | 5bb63f645b | ||
|  | a856c7cc37 | ||
|  | 26db9d8a9d | ||
|  | 8060179f6d | ||
|  | 77ebd87fed | ||
|  | e4bc92235d | ||
|  | 27a4d83ce8 | ||
|  | ece9b902f8 | ||
|  | 65a2f8a68b | ||
|  | 9c212306b8 | ||
|  | 1fdc7ce6bb | ||
|  | 0b22c140c5 | ||
|  | 944aa45459 | ||
|  | c9842ba13a | ||
|  | 8840680303 | ||
|  | 376b9b1316 | ||
|  | 54bb1cb3d9 | ||
|  | 43468b474e | ||
|  | 28a957c684 | ||
|  | ec5ddbf391 | ||
|  | bab186e195 | ||
|  | bc7e874476 | ||
|  | 97114b5948 | ||
|  | 45e015d71d | ||
|  | 0ff6531953 | ||
|  | ba298c3cfc | ||
|  | 0479bea40b | ||
|  | a536097804 | ||
|  | bbefd0fdf9 | ||
|  | 2aa8b04c21 | ||
|  | aeebdfec51 | ||
|  | debfcdf498 | ||
|  | 5c4b33e8e6 | ||
|  | eb54037b66 | ||
|  | f48af8db3b | ||
|  | 97c5b957dd | ||
|  | 95e7397803 | ||
|  | 43a989978a | ||
|  | 27734a7c26 | ||
|  | dd786d6fc4 | ||
|  | be1c28fc45 | ||
|  | 20e41b3523 | ||
|  | e07ecc5cf8 | ||
|  | 3360b72531 | ||
|  | 233b13d670 | ||
|  | 5bcbb4fdaa | ||
|  | dbe2f5f2b8 | ||
|  | ca8b58d66d | ||
|  | f80f0b416f | ||
|  | d7765511ee | ||
|  | 0240a09056 | ||
|  | ab15c4eec9 | ||
|  | 4ce1ba81a6 | ||
|  | 530440b333 | ||
|  | b80fda36af | ||
|  | 42d24263ef | ||
|  | 1e2797e7ce | ||
|  | f7075766fc | ||
|  | 5647ca70bb | ||
|  | 2b8aa6bafc | ||
|  | 410443471c | ||
|  | 0bb9781b91 | ||
|  | 2769d6d7ca | ||
|  | 120b9433c2 | ||
|  | 605092bd88 | ||
|  | a4a8c94374 | ||
|  | 0e93f6c0db | ||
|  | aa2add39ad | ||
|  | a928047147 | ||
|  | c474ca0f13 | ||
|  | 88dc64653e | ||
|  | 5f4b70f3a9 | ||
|  | 51b429e5b0 | ||
|  | 360624eb6e | ||
|  | d9d2291837 | ||
|  | cbdf816232 | ||
|  | 2d71eb8a18 | ||
|  | 64d2532ce9 | ||
|  | 0376910f33 | ||
|  | 6d503119a1 | ||
|  | bfae93e57e | ||
|  | 49a66ba81a | ||
|  | a1d43fecd9 | ||
|  | d0e42a4798 | ||
|  | 2a34358abc | ||
|  | fd2bb8ea45 | ||
|  | 98e5daa0e0 | ||
|  | ad2e119282 | ||
|  | c20c30d8d1 | ||
|  | 66d215c9c1 | ||
|  | 46e088d379 | ||
|  | bbdd15161a | ||
|  | ea9dc8cfb8 | ||
|  | 6bd2ccc9bf | ||
|  | 56327c6b58 | ||
|  | 712e8a51e4 | ||
|  | 421f324f9e | ||
|  | 8fe4a70299 | ||
|  | 3af6d0dbfd | ||
|  | e2bef076d3 | ||
|  | 1bf9f28f4b | ||
|  | f1e7b97a93 | ||
|  | 8cfe13ad90 | ||
|  | 0f420abc8e | ||
|  | 3b5b715567 | ||
|  | 520051af25 | ||
|  | 7e376b40bb | ||
|  | fd18a48608 | ||
|  | 64860c6287 | ||
|  | 58635b24ba | ||
|  | 3ec9dfc108 | ||
|  | bd1572f11a | ||
|  | 540a0cc59c | ||
|  | 83eb4f6b16 | ||
|  | 95c58bd793 | ||
|  | 65591c7727 | ||
|  | 737cbf5f60 | ||
|  | 4c67cbb4b7 | ||
|  | ed2cc2a60b | ||
|  | 859e9b3cc4 | ||
|  | c34e79fad9 | ||
|  | 82446d641e | ||
|  | 9451c9f331 | ||
|  | 61411bb259 | ||
|  | fcdb0eff8f | ||
|  | 30d9347272 | ||
|  | 7564bbdee8 | ||
|  | 69251e5000 | ||
|  | 6ecdc7b59d | ||
|  | b7d0d8f0cc | ||
|  | df52ed1162 | ||
|  | aa6370dd5d | ||
|  | c272b7901f | ||
|  | c61de6540a | ||
|  | 3c7bf50089 | ||
|  | 32fc4152a7 | ||
|  | bdf7187d5c | ||
|  | 1639576203 | ||
|  | ae20c785ea | ||
|  | a2eb876f8c | ||
|  | 5a1eaa0a98 | ||
|  | 398fd4a548 | ||
|  | 44b9fb66e1 | ||
|  | 2afa2171f9 | ||
|  | 1d7ea71c0d | ||
|  | 2a391f0f16 | ||
|  | e9b8093dac | ||
|  | 6a229cfbc5 | ||
|  | 3300f409ba | ||
|  | 4466005363 | ||
|  | 296ef5bddf | ||
|  | 1f2a432e82 | ||
|  | 855933ab2a | ||
|  | ece8d25187 | ||
|  | 589a720162 | ||
|  | a59b518cf2 | ||
|  | a15352a4f8 | ||
|  | df65f3fc3f | ||
|  | 734986c1b5 | ||
|  | 4a9ed5f2f2 | ||
|  | 088f229865 | ||
|  | cb2cb851e2 | ||
|  | d3962c4f7d | ||
|  | 0301135f96 | ||
|  | f59aa922ea | ||
|  | f60a49d6f6 | ||
|  | 9a190eb00d | ||
|  | 6bad4bd415 | ||
|  | 50d9b0b796 | ||
|  | 12f884e3ac | ||
|  | 02b1aa7355 | ||
|  | 90bfa608dd | ||
|  | 13f38b1c1d | ||
|  | 1afe7240f4 | ||
|  | 7a41155178 | ||
|  | 39a20ea471 | ||
|  | d8855a4a0f | ||
|  | de8da78042 | ||
|  | 318b42dff2 | ||
|  | 0018674b62 | ||
|  | 82913e8d69 | ||
|  | 0d867a108d | ||
|  | 5ee4b4a5ac | ||
|  | 62219d9648 | ||
|  | 6d9bfff19c | ||
|  | 7614b92197 | ||
|  | 7c1afd0031 | ||
|  | ca7b2371fb | ||
|  | ed5fba6b0f | ||
|  | 2b3b3bf652 | ||
|  | 11daf706df | ||
|  | 4a269eb2c4 | ||
|  | 9b3899476c | ||
|  | febb3d7e3d | ||
|  | 83e3c5c7d8 | ||
|  | 3c271845c9 | ||
|  | 56c4292164 | ||
|  | 2531ade3bb | ||
|  | 3e2f035400 | ||
|  | e7bcb5e366 | ||
|  | 112e921ce2 | ||
|  | 216f15602b | ||
|  | fbe1901e65 | ||
|  | 8d2bc444bb | ||
|  | cf4a45da11 | ||
|  | be78209f94 | ||
|  | 45b5bf73fe | ||
|  | 84f9e44b6c | ||
|  | 700bc1b4bb | ||
|  | beef2ede25 | ||
|  | 9bfc838029 | ||
|  | e9d7353294 | ||
|  | a6948771d8 | ||
|  | 403977cd49 | ||
|  | 153538cef9 | ||
|  | 9f1196e982 | ||
|  | 6419a8d09a | ||
|  | 769cee3d64 | ||
|  | fc460b775e | ||
|  | ba59e498de | ||
|  | 939bd2bb1f | ||
|  | e231f71b4a | ||
|  | d06c5f036b | ||
|  | 071562d755 | ||
|  | 391f659af1 | ||
|  | 8a44232bfc | ||
|  | 9188f9bf62 | ||
|  | 0187a0e113 | ||
|  | beacfae400 | ||
|  | fdc385ea33 | ||
|  | 8b97808931 | ||
|  | 179c4a10c8 | ||
|  | 6cef571bfb | ||
|  | fbe8b28b2e | ||
|  | a8d91a56bf | ||
|  | 8d7291506e | ||
|  | d9005ac2fc | ||
|  | c775c0a80c | ||
|  | 700e2cd93d | ||
|  | 083f00be84 | ||
|  | d00859ecfd | ||
|  | 4e73566c11 | ||
|  | 208a467b24 | ||
|  | e1bb453f32 | ||
|  | 4607b08be5 | ||
|  | aa5c776f3d | ||
|  | 0075c0a1e8 | ||
|  | 83fff80b0f | ||
|  | 5e553ffaf7 | ||
|  | 6d185b7f7a | ||
|  | e80144e9f2 | ||
|  | fa4b820931 | ||
|  | 63c5a4dd65 | ||
|  | 34646a414c | ||
|  | 5aeee9deb2 | ||
|  | 4c1509a62a | ||
|  | bfdaae944d | ||
|  | 4e44198bbd | ||
|  | a4e8177b76 | ||
|  | 81bf5cb78b | ||
|  | a9fc476fb8 | ||
|  | 26f0c06624 | ||
|  | 59bd72a888 | ||
|  | 7d808b483e | ||
|  | 3ee60affa9 | ||
|  | 558b8123b5 | ||
|  | ecdf2ae5c7 | ||
|  | aa9ed614ad | ||
|  | 1acdb880fc | ||
|  | 7cd22aaf83 | ||
|  | 5eb63cfa30 | ||
|  | 5dc998ed52 | ||
|  | 8074094568 | ||
|  | 56d1139d71 | ||
|  | 165cdc8840 | ||
|  | c42aef74de | ||
|  | 634e1f661f | ||
|  | a1db437c42 | ||
|  | b8e2bdc99f | ||
|  | 52d4ea7d78 | ||
|  | 7db5335420 | ||
|  | 62480fe940 | ||
|  | 3d7b30da77 | ||
|  | 8e87648d53 | ||
|  | f842c90007 | ||
|  | 7f2b686ab5 | ||
|  | b09c52fc7e | ||
|  | 202d6e414f | ||
|  | 3d817f145c | ||
|  | 181e191fee | ||
|  | 79ecf027dd | ||
|  | 76d771d20f | ||
|  | 24b8650026 | ||
|  | 269e6e29d6 | ||
|  | c4b0002ddb | ||
|  | 53598781b8 | ||
|  | 0624cdd6e4 | ||
|  | 5fb9d61d28 | ||
|  | 7b1860d17b | ||
|  | 8797565606 | ||
|  | 3d97c41fe9 | ||
|  | 5edfeb2e29 | ||
|  | 268908b3b2 | ||
|  | fb70b47acb | ||
|  | 219d316b49 | ||
|  | 3aa2233b5d | ||
|  | d59862ae6e | ||
|  | 0a03f9a31a | ||
|  | dca135190a | ||
|  | aedcf3dc81 | ||
|  | 6961a9494f | ||
|  | 6d70ef1a08 | ||
|  | e1fc15875d | ||
|  | 94ae1388b1 | ||
|  | 17728d4e74 | ||
|  | 417aa743ca | ||
|  | 2f26f7a827 | ||
|  | 09f9c59b3d | ||
|  | bec6805296 | ||
|  | d99c7c20cc | ||
|  | 60b6ad3fcf | ||
|  | 9b4d0f6450 | ||
|  | 1a2c74391c | ||
|  | 08288e591c | ||
|  | 823cf421fa | ||
|  | 3799f27734 | ||
|  | a7edd8602c | ||
|  | c081aca794 | ||
|  | 2ca6648227 | ||
|  | 1af54f93f5 | ||
|  | a9cacd2e06 | ||
|  | f7fbb3d2f6 | ||
|  | adb7bbeea0 | ||
|  | b91db87ae0 | 
							
								
								
									
										8
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										8
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -1,7 +1,8 @@ | |||||||
| .* | .* | ||||||
| !.gitignore | !.gitignore | ||||||
| *.pyc | *~ | ||||||
| .*.swp | *.py[co] | ||||||
|  | .*.sw[po] | ||||||
| *.egg | *.egg | ||||||
| docs/.build | docs/.build | ||||||
| docs/_build | docs/_build | ||||||
| @@ -12,4 +13,5 @@ env/ | |||||||
| .settings | .settings | ||||||
| .project | .project | ||||||
| .pydevproject | .pydevproject | ||||||
| tests/bugfix.py | tests/test_bugfix.py | ||||||
|  | htmlcov/ | ||||||
							
								
								
									
										12
									
								
								.travis.yml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								.travis.yml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,12 @@ | |||||||
|  | # http://travis-ci.org/#!/MongoEngine/mongoengine | ||||||
|  | language: python | ||||||
|  | python: | ||||||
|  |     - 2.6 | ||||||
|  |     - 2.7 | ||||||
|  | install: | ||||||
|  |     - sudo apt-get install zlib1g zlib1g-dev | ||||||
|  |     - sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/ | ||||||
|  |     - pip install PIL --use-mirrors ; true | ||||||
|  |     - python setup.py install | ||||||
|  | script: | ||||||
|  |     - python setup.py test | ||||||
							
								
								
									
										43
									
								
								AUTHORS
									
									
									
									
									
								
							
							
						
						
									
										43
									
								
								AUTHORS
									
									
									
									
									
								
							| @@ -1,11 +1,11 @@ | |||||||
| The PRIMARY AUTHORS are (and/or have been): | The PRIMARY AUTHORS are (and/or have been): | ||||||
|  |  | ||||||
|  | Ross Lawley <ross.lawley@gmail.com> | ||||||
| Harry Marr <harry@hmarr.com> | Harry Marr <harry@hmarr.com> | ||||||
| Matt Dennewitz <mattdennewitz@gmail.com> | Matt Dennewitz <mattdennewitz@gmail.com> | ||||||
| Deepak Thukral <iapain@yahoo.com> | Deepak Thukral <iapain@yahoo.com> | ||||||
| Florian Schlachter <flori@n-schlachter.de> | Florian Schlachter <flori@n-schlachter.de> | ||||||
| Steve Challis <steve@stevechallis.com> | Steve Challis <steve@stevechallis.com> | ||||||
| Ross Lawley <ross.lawley@gmail.com> |  | ||||||
| Wilson Júnior <wilsonpjunior@gmail.com> | Wilson Júnior <wilsonpjunior@gmail.com> | ||||||
| Dan Crosta https://github.com/dcrosta | Dan Crosta https://github.com/dcrosta | ||||||
|  |  | ||||||
| @@ -67,5 +67,44 @@ that much better: | |||||||
|  * Gareth Lloyd |  * Gareth Lloyd | ||||||
|  * Albert Choi |  * Albert Choi | ||||||
|  * John Arnfield |  * John Arnfield | ||||||
|  |  * grubberr | ||||||
|  |  * Paul Aliagas | ||||||
|  |  * Paul Cunnane | ||||||
|  * Julien Rebetez |  * Julien Rebetez | ||||||
|  |  * Marc Tamlyn | ||||||
|  |  * Karim Allah | ||||||
|  |  * Adam Parrish | ||||||
|  |  * jpfarias | ||||||
|  |  * jonrscott | ||||||
|  |  * Alice Zoë Bevan-McGregor | ||||||
|  |  * Stephen Young | ||||||
|  |  * tkloc | ||||||
|  |  * aid | ||||||
|  |  * yamaneko1212 | ||||||
|  |  * dave mankoff | ||||||
|  |  * Alexander G. Morano | ||||||
|  |  * jwilder | ||||||
|  |  * Joe Shaw | ||||||
|  |  * Adam Flynn | ||||||
|  |  * Ankhbayar | ||||||
|  |  * Jan Schrewe | ||||||
|  |  * David Koblas | ||||||
|  |  * Crittercism | ||||||
|  |  * Alvin Liang | ||||||
|  |  * andrewmlevy | ||||||
|  |  * Chris Faulkner | ||||||
|  |  * Ashwin Purohit | ||||||
|  |  * Shalabh Aggarwal | ||||||
|  |  * Chris Williams | ||||||
|  |  * Robert Kajic | ||||||
|  |  * Jacob Peddicord | ||||||
|  |  * Nils Hasenbanck | ||||||
|  |  * mostlystatic | ||||||
|  |  * Greg Banks | ||||||
|  |  * swashbuckler | ||||||
|  |  * Adam Reeve | ||||||
|  |  * Anthony Nemitz | ||||||
|  |  * deignacio | ||||||
|  |  * shaunduncan | ||||||
|  |  * Meir Kriheli | ||||||
|  |  * Andrey Fedoseev | ||||||
							
								
								
									
										19
									
								
								README.rst
									
									
									
									
									
								
							
							
						
						
									
										19
									
								
								README.rst
									
									
									
									
									
								
							| @@ -3,25 +3,29 @@ MongoEngine | |||||||
| =========== | =========== | ||||||
| :Info: MongoEngine is an ORM-like layer on top of PyMongo. | :Info: MongoEngine is an ORM-like layer on top of PyMongo. | ||||||
| :Author: Harry Marr (http://github.com/hmarr) | :Author: Harry Marr (http://github.com/hmarr) | ||||||
|  | :Maintainer: Ross Lawley (http://github.com/rozza) | ||||||
|  |  | ||||||
|  | .. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master | ||||||
|  |   :target: http://travis-ci.org/MongoEngine/mongoengine | ||||||
|  |  | ||||||
| About | About | ||||||
| ===== | ===== | ||||||
| MongoEngine is a Python Object-Document Mapper for working with MongoDB. | MongoEngine is a Python Object-Document Mapper for working with MongoDB. | ||||||
| Documentation available at http://hmarr.com/mongoengine/ - there is currently  | Documentation available at http://mongoengine-odm.rtfd.org - there is currently | ||||||
| a `tutorial <http://hmarr.com/mongoengine/tutorial.html>`_, a `user guide  | a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide | ||||||
| <http://hmarr.com/mongoengine/userguide.html>`_ and an `API reference | <http://readthedocs.org/docs/mongoengine-odm/en/latest/userguide.html>`_ and an `API reference | ||||||
| <http://hmarr.com/mongoengine/apireference.html>`_. | <http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_. | ||||||
|  |  | ||||||
| Installation | Installation | ||||||
| ============ | ============ | ||||||
| If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | ||||||
| you can use ``easy_install -U mongoengine``. Otherwise, you can download the | you can use ``easy_install -U mongoengine``. Otherwise, you can download the | ||||||
| source from `GitHub <http://github.com/hmarr/mongoengine>`_ and run ``python | source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python | ||||||
| setup.py install``. | setup.py install``. | ||||||
|  |  | ||||||
| Dependencies | Dependencies | ||||||
| ============ | ============ | ||||||
| - pymongo 1.1+ | - pymongo 2.1.1+ | ||||||
| - sphinx (optional - for documentation generation) | - sphinx (optional - for documentation generation) | ||||||
|  |  | ||||||
| Examples | Examples | ||||||
| @@ -92,6 +96,7 @@ Community | |||||||
|  |  | ||||||
| Contributing | Contributing | ||||||
| ============ | ============ | ||||||
| The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ - to | The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to | ||||||
| contribute to the project, fork it on GitHub and send a pull request, all | contribute to the project, fork it on GitHub and send a pull request, all | ||||||
| contributions and suggestions are welcome! | contributions and suggestions are welcome! | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										182
									
								
								benchmark.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										182
									
								
								benchmark.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,182 @@ | |||||||
|  | #!/usr/bin/env python | ||||||
|  |  | ||||||
|  | import timeit | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def cprofile_main(): | ||||||
|  |     from pymongo import Connection | ||||||
|  |     connection = Connection() | ||||||
|  |     connection.drop_database('timeit_test') | ||||||
|  |     connection.disconnect() | ||||||
|  |  | ||||||
|  |     from mongoengine import Document, DictField, connect | ||||||
|  |     connect("timeit_test") | ||||||
|  |  | ||||||
|  |     class Noddy(Document): | ||||||
|  |         fields = DictField() | ||||||
|  |  | ||||||
|  |     for i in xrange(1): | ||||||
|  |         noddy = Noddy() | ||||||
|  |         for j in range(20): | ||||||
|  |             noddy.fields["key" + str(j)] = "value " + str(j) | ||||||
|  |         noddy.save() | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def main(): | ||||||
|  |     """ | ||||||
|  |     0.4 Performance Figures ... | ||||||
|  |  | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - Pymongo | ||||||
|  |     1.1141769886 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine | ||||||
|  |     2.37724113464 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||||
|  |     1.92479610443 | ||||||
|  |  | ||||||
|  |     0.5.X | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - Pymongo | ||||||
|  |     1.10552310944 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine | ||||||
|  |     16.5169169903 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||||
|  |     14.9446101189 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||||
|  |     14.912801981 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, force=True | ||||||
|  |     14.9617750645 | ||||||
|  |  | ||||||
|  |     Performance | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - Pymongo | ||||||
|  |     1.10072994232 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine | ||||||
|  |     5.27341103554 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False | ||||||
|  |     4.49365401268 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False | ||||||
|  |     4.43459296227 | ||||||
|  |     ---------------------------------------------------------------------------------------------------- | ||||||
|  |     Creating 10000 dictionaries - MongoEngine, force=True | ||||||
|  |     4.40114378929 | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     setup = """ | ||||||
|  | from pymongo import Connection | ||||||
|  | connection = Connection() | ||||||
|  | connection.drop_database('timeit_test') | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | from pymongo import Connection | ||||||
|  | connection = Connection() | ||||||
|  |  | ||||||
|  | db = connection.timeit_test | ||||||
|  | noddy = db.noddy | ||||||
|  |  | ||||||
|  | for i in xrange(10000): | ||||||
|  |     example = {'fields': {}} | ||||||
|  |     for j in range(20): | ||||||
|  |         example['fields']["key"+str(j)] = "value "+str(j) | ||||||
|  |  | ||||||
|  |     noddy.insert(example) | ||||||
|  |  | ||||||
|  | myNoddys = noddy.find() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print "-" * 100 | ||||||
|  |     print """Creating 10000 dictionaries - Pymongo""" | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print t.timeit(1) | ||||||
|  |  | ||||||
|  |     setup = """ | ||||||
|  | from pymongo import Connection | ||||||
|  | connection = Connection() | ||||||
|  | connection.drop_database('timeit_test') | ||||||
|  | connection.disconnect() | ||||||
|  |  | ||||||
|  | from mongoengine import Document, DictField, connect | ||||||
|  | connect("timeit_test") | ||||||
|  |  | ||||||
|  | class Noddy(Document): | ||||||
|  |     fields = DictField() | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in xrange(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save() | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print "-" * 100 | ||||||
|  |     print """Creating 10000 dictionaries - MongoEngine""" | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print t.timeit(1) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in xrange(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(safe=False, validate=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print "-" * 100 | ||||||
|  |     print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False""" | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print t.timeit(1) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in xrange(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(safe=False, validate=False, cascade=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print "-" * 100 | ||||||
|  |     print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False""" | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print t.timeit(1) | ||||||
|  |  | ||||||
|  |     stmt = """ | ||||||
|  | for i in xrange(10000): | ||||||
|  |     noddy = Noddy() | ||||||
|  |     for j in range(20): | ||||||
|  |         noddy.fields["key"+str(j)] = "value "+str(j) | ||||||
|  |     noddy.save(force_insert=True, safe=False, validate=False, cascade=False) | ||||||
|  |  | ||||||
|  | myNoddys = Noddy.objects() | ||||||
|  | [n for n in myNoddys] # iterate | ||||||
|  | """ | ||||||
|  |  | ||||||
|  |     print "-" * 100 | ||||||
|  |     print """Creating 10000 dictionaries - MongoEngine, force=True""" | ||||||
|  |     t = timeit.Timer(stmt=stmt, setup=setup) | ||||||
|  |     print t.timeit(1) | ||||||
|  |  | ||||||
|  | if __name__ == "__main__": | ||||||
|  |     main() | ||||||
| @@ -6,6 +6,7 @@ Connecting | |||||||
| ========== | ========== | ||||||
|  |  | ||||||
| .. autofunction:: mongoengine.connect | .. autofunction:: mongoengine.connect | ||||||
|  | .. autofunction:: mongoengine.register_connection | ||||||
|  |  | ||||||
| Documents | Documents | ||||||
| ========= | ========= | ||||||
| @@ -21,9 +22,18 @@ Documents | |||||||
| .. autoclass:: mongoengine.EmbeddedDocument | .. autoclass:: mongoengine.EmbeddedDocument | ||||||
|    :members: |    :members: | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.DynamicDocument | ||||||
|  |    :members: | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.DynamicEmbeddedDocument | ||||||
|  |    :members: | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.document.MapReduceDocument | .. autoclass:: mongoengine.document.MapReduceDocument | ||||||
|   :members: |   :members: | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.ValidationError | ||||||
|  |   :members: | ||||||
|  |  | ||||||
| Querying | Querying | ||||||
| ======== | ======== | ||||||
|  |  | ||||||
|   | |||||||
| @@ -2,15 +2,157 @@ | |||||||
| Changelog | Changelog | ||||||
| ========= | ========= | ||||||
|  |  | ||||||
|  | Changes in 0.6.11 | ||||||
|  | ================== | ||||||
|  | - Fixed inconsistency handling None values field attrs | ||||||
|  | - Fixed map_field embedded db_field issue | ||||||
|  | - Fixed .save() _delta issue with DbRefs | ||||||
|  | - Fixed Django TestCase | ||||||
|  | - Added cmp to Embedded Document | ||||||
|  | - Added PULL reverse_delete_rule | ||||||
|  | - Fixed CASCADE delete bug | ||||||
|  | - Fixed db_field data load error | ||||||
|  | - Fixed recursive save with FileField | ||||||
|  |  | ||||||
|  | Changes in 0.6.10 | ||||||
|  | ================= | ||||||
|  | - Fixed basedict / baselist to return super(..) | ||||||
|  | - Promoted BaseDynamicField to DynamicField | ||||||
|  |  | ||||||
|  | Changes in 0.6.9 | ||||||
|  | ================ | ||||||
|  | - Fixed sparse indexes on inherited docs | ||||||
|  | - Removed FileField auto deletion, needs more work maybe 0.7 | ||||||
|  |  | ||||||
|  | Changes in 0.6.8 | ||||||
|  | ================ | ||||||
|  | - Fixed FileField losing reference when no default set | ||||||
|  | - Removed possible race condition from FileField (grid_file) | ||||||
|  | - Added assignment to save, can now do: b = MyDoc(**kwargs).save() | ||||||
|  | - Added support for pull operations on nested EmbeddedDocuments | ||||||
|  | - Added support for choices with GenericReferenceFields | ||||||
|  | - Added support for choices with GenericEmbeddedDocumentFields | ||||||
|  | - Fixed Django 1.4 sessions first save data loss | ||||||
|  | - FileField now automatically delete files on .delete() | ||||||
|  | - Fix for GenericReference to_mongo method | ||||||
|  | - Fixed connection regression | ||||||
|  | - Updated Django User document, now allows inheritance | ||||||
|  |  | ||||||
|  | Changes in 0.6.7 | ||||||
|  | ================ | ||||||
|  | - Fixed indexing on '_id' or 'pk' or 'id' | ||||||
|  | - Invalid data from the DB now raises a InvalidDocumentError | ||||||
|  | - Cleaned up the Validation Error - docs and code | ||||||
|  | - Added meta `auto_create_index` so you can disable index creation | ||||||
|  | - Added write concern options to inserts | ||||||
|  | - Fixed typo in meta for index options | ||||||
|  | - Bug fix Read preference now passed correctly | ||||||
|  | - Added support for File like objects for GridFS | ||||||
|  | - Fix for #473 - Dereferencing abstracts | ||||||
|  |  | ||||||
|  | Changes in 0.6.6 | ||||||
|  | ================ | ||||||
|  | - Django 1.4 fixed (finally) | ||||||
|  | - Added tests for Django | ||||||
|  |  | ||||||
|  | Changes in 0.6.5 | ||||||
|  | ================ | ||||||
|  | - More Django updates | ||||||
|  |  | ||||||
|  | Changes in 0.6.4 | ||||||
|  | ================ | ||||||
|  |  | ||||||
|  | - Refactored connection / fixed replicasetconnection | ||||||
|  | - Bug fix for unknown connection alias error message | ||||||
|  | - Sessions support Django 1.3 and Django 1.4 | ||||||
|  | - Minor fix for ReferenceField | ||||||
|  |  | ||||||
|  | Changes in 0.6.3 | ||||||
|  | ================ | ||||||
|  | - Updated sessions for Django 1.4 | ||||||
|  | - Bug fix for updates where listfields contain embedded documents | ||||||
|  | - Bug fix for collection naming and mixins | ||||||
|  |  | ||||||
|  | Changes in 0.6.2 | ||||||
|  | ================ | ||||||
|  | - Updated documentation for ReplicaSet connections | ||||||
|  | - Hack round _types issue with SERVER-5247 - querying other arrays may also cause problems. | ||||||
|  |  | ||||||
|  | Changes in 0.6.1 | ||||||
|  | ================ | ||||||
|  | - Fix for replicaSet connections | ||||||
|  |  | ||||||
|  | Changes in 0.6 | ||||||
|  | ================ | ||||||
|  |  | ||||||
|  | - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7 | ||||||
|  | - Added support for covered indexes when inheritance is off | ||||||
|  | - No longer always upsert on save for items with a '_id' | ||||||
|  | - Error raised if update doesn't have an operation | ||||||
|  | - DeReferencing is now thread safe | ||||||
|  | - Errors raised if trying to perform a join in a query | ||||||
|  | - Updates can now take __raw__ queries | ||||||
|  | - Added custom 2D index declarations | ||||||
|  | - Added replicaSet connection support | ||||||
|  | - Updated deprecated imports from pymongo (safe for pymongo 2.2) | ||||||
|  | - Added uri support for connections | ||||||
|  | - Added scalar for efficiently returning partial data values (aliased to values_list) | ||||||
|  | - Fixed limit skip bug | ||||||
|  | - Improved Inheritance / Mixin | ||||||
|  | - Added sharding support | ||||||
|  | - Added pymongo 2.1 support | ||||||
|  | - Fixed Abstract documents can now declare indexes | ||||||
|  | - Added db_alias support to individual documents | ||||||
|  | - Fixed GridFS documents can now be pickled | ||||||
|  | - Added Now raises an InvalidDocumentError when declaring multiple fields with the same db_field | ||||||
|  | - Added InvalidQueryError when calling with_id with a filter | ||||||
|  | - Added support for DBRefs in distinct() | ||||||
|  | - Fixed issue saving False booleans | ||||||
|  | - Fixed issue with dynamic documents deltas | ||||||
|  | - Added Reverse Delete Rule support to ListFields - MapFields aren't supported | ||||||
|  | - Added customisable cascade kwarg options | ||||||
|  | - Fixed Handle None values for non-required fields | ||||||
|  | - Removed Document._get_subclasses() - no longer required | ||||||
|  | - Fixed bug requiring subclasses when not actually needed | ||||||
|  | - Fixed deletion of dynamic data | ||||||
|  | - Added support for the $elementMatch operator | ||||||
|  | - Added reverse option to SortedListFields | ||||||
|  | - Fixed dereferencing - multi directional list dereferencing | ||||||
|  | - Fixed issue creating indexes with recursive embedded documents | ||||||
|  | - Fixed recursive lookup in _unique_with_indexes | ||||||
|  | - Fixed passing ComplexField defaults to constructor for ReferenceFields | ||||||
|  | - Fixed validation of DictField Int keys | ||||||
|  | - Added optional cascade saving | ||||||
|  | - Fixed dereferencing - max_depth now taken into account | ||||||
|  | - Fixed document mutation saving issue | ||||||
|  | - Fixed positional operator when replacing embedded documents | ||||||
|  | - Added Non-Django Style choices back (you can have either) | ||||||
|  | - Fixed __repr__ of a sliced queryset | ||||||
|  | - Added recursive validation error of documents / complex fields | ||||||
|  | - Fixed breaking during queryset iteration | ||||||
|  | - Added pre and post bulk-insert signals | ||||||
|  | - Added ImageField - requires PIL | ||||||
|  | - Fixed Reference Fields can be None in get_or_create / queries | ||||||
|  | - Fixed accessing pk on an embedded document | ||||||
|  | - Fixed calling a queryset after drop_collection now recreates the collection | ||||||
|  | - Add field name to validation exception messages | ||||||
|  | - Added UUID field | ||||||
|  | - Improved efficiency of .get() | ||||||
|  | - Updated ComplexFields so if required they won't accept empty lists / dicts | ||||||
|  | - Added spec file for rpm-based distributions | ||||||
|  | - Fixed ListField so it doesn't accept strings | ||||||
|  | - Added DynamicDocument and EmbeddedDynamicDocument classes for expando schemas | ||||||
|  |  | ||||||
| Changes in v0.5.2 | Changes in v0.5.2 | ||||||
| ================= | ================= | ||||||
|  |  | ||||||
| - A Robust Circular reference bugfix | - A Robust Circular reference bugfix | ||||||
|  |  | ||||||
|  |  | ||||||
| Changes in v0.5.1 | Changes in v0.5.1 | ||||||
| ================= | ================= | ||||||
|  |  | ||||||
| - Circular reference bugfix | - Fixed simple circular reference bug | ||||||
|  |  | ||||||
| Changes in v0.5 | Changes in v0.5 | ||||||
| =============== | =============== | ||||||
|   | |||||||
| @@ -38,7 +38,7 @@ master_doc = 'index' | |||||||
|  |  | ||||||
| # General information about the project. | # General information about the project. | ||||||
| project = u'MongoEngine' | project = u'MongoEngine' | ||||||
| copyright = u'2009-2011, Harry Marr' | copyright = u'2009-2012, MongoEngine Authors' | ||||||
|  |  | ||||||
| # The version info for the project you're documenting, acts as replacement for | # The version info for the project you're documenting, acts as replacement for | ||||||
| # |version| and |release|, also used in various other places throughout the | # |version| and |release|, also used in various other places throughout the | ||||||
| @@ -121,7 +121,7 @@ html_theme_path = ['_themes'] | |||||||
| # Add any paths that contain custom static files (such as style sheets) here, | # Add any paths that contain custom static files (such as style sheets) here, | ||||||
| # relative to this directory. They are copied after the builtin static files, | # relative to this directory. They are copied after the builtin static files, | ||||||
| # so a file named "default.css" will overwrite the builtin "default.css". | # so a file named "default.css" will overwrite the builtin "default.css". | ||||||
| html_static_path = ['_static'] | #html_static_path = ['_static'] | ||||||
|  |  | ||||||
| # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, | ||||||
| # using the given strftime format. | # using the given strftime format. | ||||||
|   | |||||||
| @@ -2,6 +2,8 @@ | |||||||
| Using MongoEngine with Django | Using MongoEngine with Django | ||||||
| ============================= | ============================= | ||||||
|  |  | ||||||
|  | .. note :: Updated to support Django 1.4 | ||||||
|  |  | ||||||
| Connecting | Connecting | ||||||
| ========== | ========== | ||||||
| In your **settings.py** file, ignore the standard database settings (unless you | In your **settings.py** file, ignore the standard database settings (unless you | ||||||
|   | |||||||
| @@ -3,6 +3,7 @@ | |||||||
| ===================== | ===================== | ||||||
| Connecting to MongoDB | Connecting to MongoDB | ||||||
| ===================== | ===================== | ||||||
|  |  | ||||||
| To connect to a running instance of :program:`mongod`, use the | To connect to a running instance of :program:`mongod`, use the | ||||||
| :func:`~mongoengine.connect` function. The first argument is the name of the | :func:`~mongoengine.connect` function. The first argument is the name of the | ||||||
| database to connect to. If the database does not exist, it will be created. If | database to connect to. If the database does not exist, it will be created. If | ||||||
| @@ -18,3 +19,47 @@ provide :attr:`host` and :attr:`port` arguments to | |||||||
| :func:`~mongoengine.connect`:: | :func:`~mongoengine.connect`:: | ||||||
|  |  | ||||||
|     connect('project1', host='192.168.1.35', port=12345) |     connect('project1', host='192.168.1.35', port=12345) | ||||||
|  |  | ||||||
|  | Uri style connections are also supported as long as you include the database | ||||||
|  | name - just supply the uri as the :attr:`host` to | ||||||
|  | :func:`~mongoengine.connect`:: | ||||||
|  |  | ||||||
|  |     connect('project1', host='mongodb://localhost/database_name') | ||||||
|  |  | ||||||
|  | ReplicaSets | ||||||
|  | =========== | ||||||
|  |  | ||||||
|  | MongoEngine now supports :func:`~pymongo.replica_set_connection.ReplicaSetConnection` | ||||||
|  | to use them please use a URI style connection and provide the `replicaSet` name in the | ||||||
|  | connection kwargs. | ||||||
|  |  | ||||||
|  | Multiple Databases | ||||||
|  | ================== | ||||||
|  |  | ||||||
|  | Multiple database support was added in MongoEngine 0.6. To use multiple | ||||||
|  | databases you can use :func:`~mongoengine.connect` and provide an `alias` name | ||||||
|  | for the connection - if no `alias` is provided then "default" is used. | ||||||
|  |  | ||||||
|  | In the background this uses :func:`~mongoengine.register_connection` to | ||||||
|  | store the data and you can register all aliases up front if required. | ||||||
|  |  | ||||||
|  | Individual documents can also support multiple databases by providing a | ||||||
|  | `db_alias` in their meta data.  This allows :class:`~pymongo.dbref.DBRef` objects | ||||||
|  | to point across databases and collections.  Below is an example schema, using | ||||||
|  | 3 different databases to store data:: | ||||||
|  |  | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |             meta = {"db_alias": "user-db"} | ||||||
|  |  | ||||||
|  |         class Book(Document): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |             meta = {"db_alias": "book-db"} | ||||||
|  |  | ||||||
|  |         class AuthorBooks(Document): | ||||||
|  |             author = ReferenceField(User) | ||||||
|  |             book = ReferenceField(Book) | ||||||
|  |  | ||||||
|  |             meta = {"db_alias": "users-books-db"} | ||||||
|   | |||||||
| @@ -24,6 +24,34 @@ objects** as class attributes to the document class:: | |||||||
|         title = StringField(max_length=200, required=True) |         title = StringField(max_length=200, required=True) | ||||||
|         date_modified = DateTimeField(default=datetime.datetime.now) |         date_modified = DateTimeField(default=datetime.datetime.now) | ||||||
|  |  | ||||||
|  | Dynamic document schemas | ||||||
|  | ======================== | ||||||
|  | One of the benefits of MongoDB is dynamic schemas for a collection, whilst data | ||||||
|  | should be planned and organised (after all explicit is better than implicit!) | ||||||
|  | there are scenarios where having dynamic / expando style documents is desirable. | ||||||
|  |  | ||||||
|  | :class:`~mongoengine.DynamicDocument` documents work in the same way as | ||||||
|  | :class:`~mongoengine.Document` but any data / attributes set to them will also | ||||||
|  | be saved :: | ||||||
|  |  | ||||||
|  |     from mongoengine import * | ||||||
|  |  | ||||||
|  |     class Page(DynamicDocument): | ||||||
|  |         title = StringField(max_length=200, required=True) | ||||||
|  |  | ||||||
|  |     # Create a new page and add tags | ||||||
|  |     >>> page = Page(title='Using MongoEngine') | ||||||
|  |     >>> page.tags = ['mongodb', 'mongoengine'] | ||||||
|  |     >>> page.save() | ||||||
|  |  | ||||||
|  |     >>> Page.objects(tags='mongoengine').count() | ||||||
|  |     >>> 1 | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |  | ||||||
|  |    There is one caveat on Dynamic Documents: fields cannot start with `_` | ||||||
|  |  | ||||||
|  |  | ||||||
| Fields | Fields | ||||||
| ====== | ====== | ||||||
| By default, fields are not required. To make a field mandatory, set the | By default, fields are not required. To make a field mandatory, set the | ||||||
| @@ -70,7 +98,7 @@ arguments can be set on all fields: | |||||||
|  |  | ||||||
| :attr:`required` (Default: False) | :attr:`required` (Default: False) | ||||||
|     If set to True and the field is not set on the document instance, a |     If set to True and the field is not set on the document instance, a | ||||||
|     :class:`~mongoengine.base.ValidationError` will be raised when the document is |     :class:`~mongoengine.ValidationError` will be raised when the document is | ||||||
|     validated. |     validated. | ||||||
|  |  | ||||||
| :attr:`default` (Default: None) | :attr:`default` (Default: None) | ||||||
| @@ -107,12 +135,33 @@ arguments can be set on all fields: | |||||||
|     When True, use this field as a primary key for the collection. |     When True, use this field as a primary key for the collection. | ||||||
|  |  | ||||||
| :attr:`choices` (Default: None) | :attr:`choices` (Default: None) | ||||||
|     An iterable of choices to which the value of this field should be limited. |     An iterable (e.g. a list or tuple) of choices to which the value of this | ||||||
|  |     field should be limited. | ||||||
|  |  | ||||||
|  |     Can either be nested tuples of a value (stored in mongo) and a | ||||||
|  |     human readable key :: | ||||||
|  |  | ||||||
|  |         SIZE = (('S', 'Small'), | ||||||
|  |                 ('M', 'Medium'), | ||||||
|  |                 ('L', 'Large'), | ||||||
|  |                 ('XL', 'Extra Large'), | ||||||
|  |                 ('XXL', 'Extra Extra Large')) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |         class Shirt(Document): | ||||||
|  |             size = StringField(max_length=3, choices=SIZE) | ||||||
|  |  | ||||||
|  |     Or a flat iterable just containing values :: | ||||||
|  |  | ||||||
|  |         SIZE = ('S', 'M', 'L', 'XL', 'XXL') | ||||||
|  |  | ||||||
|  |         class Shirt(Document): | ||||||
|  |             size = StringField(max_length=3, choices=SIZE) | ||||||
|  |  | ||||||
| :attr:`help_text` (Default: None) | :attr:`help_text` (Default: None) | ||||||
|     Optional help text to output with the field - used by form libraries |     Optional help text to output with the field - used by form libraries | ||||||
|  |  | ||||||
| :attr:`verbose` (Default: None) | :attr:`verbose_name` (Default: None) | ||||||
|     Optional human-readable name for the field - used by form libraries |     Optional human-readable name for the field - used by form libraries | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -240,6 +289,10 @@ Its value can take any of the following constants: | |||||||
| :const:`mongoengine.CASCADE` | :const:`mongoengine.CASCADE` | ||||||
|   Any object containing fields that are refererring to the object being deleted |   Any object containing fields that are refererring to the object being deleted | ||||||
|   are deleted first. |   are deleted first. | ||||||
|  | :const:`mongoengine.PULL` | ||||||
|  |   Removes the reference to the object (using MongoDB's "pull" operation) | ||||||
|  |   from any object's fields of | ||||||
|  |   :class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`). | ||||||
|  |  | ||||||
|  |  | ||||||
| .. warning:: | .. warning:: | ||||||
| @@ -382,10 +435,31 @@ If a dictionary is passed then the following options are available: | |||||||
| :attr:`unique` (Default: False) | :attr:`unique` (Default: False) | ||||||
|     Whether the index should be sparse. |     Whether the index should be sparse. | ||||||
|  |  | ||||||
| .. note:: | .. warning:: | ||||||
|  |  | ||||||
|    Geospatial indexes will be automatically created for all |  | ||||||
|    :class:`~mongoengine.GeoPointField`\ s |    Inheritance adds extra indices. | ||||||
|  |    If you don't need inheritance for a document, turn inheritance off - see :ref:`document-inheritance`. | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Geospatial indexes | ||||||
|  | --------------------------- | ||||||
|  | Geospatial indexes will be automatically created for all | ||||||
|  | :class:`~mongoengine.GeoPointField`\ s | ||||||
|  |  | ||||||
|  | It is also possible to explicitly define geospatial indexes. This is | ||||||
|  | useful if you need to define a geospatial index on a subfield of a | ||||||
|  | :class:`~mongoengine.DictField` or a custom field that contains a | ||||||
|  | point. To create a geospatial index you must prefix the field with the | ||||||
|  | ``*`` sign. :: | ||||||
|  |  | ||||||
|  |     class Place(Document): | ||||||
|  |         location = DictField() | ||||||
|  |         meta = { | ||||||
|  |             'indexes': [ | ||||||
|  |                 '*location.point', | ||||||
|  |             ], | ||||||
|  |         } | ||||||
|  |  | ||||||
| Ordering | Ordering | ||||||
| ======== | ======== | ||||||
| @@ -427,8 +501,31 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. :: | |||||||
|     first_post = BlogPost.objects.order_by("+published_date").first() |     first_post = BlogPost.objects.order_by("+published_date").first() | ||||||
|     assert first_post.title == "Blog Post #1" |     assert first_post.title == "Blog Post #1" | ||||||
|  |  | ||||||
|  | Shard keys | ||||||
|  | ========== | ||||||
|  |  | ||||||
|  | If your collection is sharded, then you need to specify the shard key as a tuple, | ||||||
|  | using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`. | ||||||
|  | This ensures that the shard key is sent with the query when calling the | ||||||
|  | :meth:`~mongoengine.document.Document.save` or | ||||||
|  | :meth:`~mongoengine.document.Document.update` method on an existing | ||||||
|  | :class:`~mongoengine.Document` instance:: | ||||||
|  |  | ||||||
|  |     class LogEntry(Document): | ||||||
|  |         machine = StringField() | ||||||
|  |         app = StringField() | ||||||
|  |         timestamp = DateTimeField() | ||||||
|  |         data = StringField() | ||||||
|  |  | ||||||
|  |         meta = { | ||||||
|  |             'shard_key': ('machine', 'timestamp',) | ||||||
|  |         } | ||||||
|  |  | ||||||
|  | .. _document-inheritance: | ||||||
|  |  | ||||||
| Document inheritance | Document inheritance | ||||||
| ==================== | ==================== | ||||||
|  |  | ||||||
| To create a specialised type of a :class:`~mongoengine.Document` you have | To create a specialised type of a :class:`~mongoengine.Document` you have | ||||||
| defined, you may subclass it and add any extra fields or methods you may need. | defined, you may subclass it and add any extra fields or methods you may need. | ||||||
| As this new class is not a direct subclass of | As this new class is not a direct subclass of | ||||||
| @@ -440,10 +537,15 @@ convenient and efficient retrieval of related documents:: | |||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         title = StringField(max_length=200, required=True) |         title = StringField(max_length=200, required=True) | ||||||
|  |  | ||||||
|  |         meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|     # Also stored in the collection named 'page' |     # Also stored in the collection named 'page' | ||||||
|     class DatedPage(Page): |     class DatedPage(Page): | ||||||
|         date = DateTimeField() |         date = DateTimeField() | ||||||
|  |  | ||||||
|  | .. note:: From 0.7 onwards you must declare `allow_inheritance` in the document meta. | ||||||
|  |  | ||||||
|  |  | ||||||
| Working with existing data | Working with existing data | ||||||
| -------------------------- | -------------------------- | ||||||
| To enable correct retrieval of documents involved in this kind of hierarchy, | To enable correct retrieval of documents involved in this kind of hierarchy, | ||||||
|   | |||||||
| @@ -35,13 +35,23 @@ already exist, then any changes will be updated atomically.  For example:: | |||||||
|     * ``list_field.pop(0)`` - *sets* the resulting list |     * ``list_field.pop(0)`` - *sets* the resulting list | ||||||
|     * ``del(list_field)``   - *unsets* whole list |     * ``del(list_field)``   - *unsets* whole list | ||||||
|  |  | ||||||
| To delete a document, call the :meth:`~mongoengine.Document.delete` method. |  | ||||||
| Note that this will only work if the document exists in the database and has a |  | ||||||
| valide :attr:`id`. |  | ||||||
|  |  | ||||||
| .. seealso:: | .. seealso:: | ||||||
|     :ref:`guide-atomic-updates` |     :ref:`guide-atomic-updates` | ||||||
|  |  | ||||||
|  | Cascading Saves | ||||||
|  | --------------- | ||||||
|  | If your document contains :class:`~mongoengine.ReferenceField` or | ||||||
|  | :class:`~mongoengine.GenericReferenceField` objects, then by default the | ||||||
|  | :meth:`~mongoengine.Document.save` method will automatically save any changes to | ||||||
|  | those objects as well.  If this is not desired passing :attr:`cascade` as False | ||||||
|  | to the save method turns this feature off. | ||||||
|  |  | ||||||
|  | Deleting documents | ||||||
|  | ------------------ | ||||||
|  | To delete a document, call the :meth:`~mongoengine.Document.delete` method. | ||||||
|  | Note that this will only work if the document exists in the database and has a | ||||||
|  | valid :attr:`id`. | ||||||
|  |  | ||||||
| Document IDs | Document IDs | ||||||
| ============ | ============ | ||||||
| Each document in the database has a unique id. This may be accessed through the | Each document in the database has a unique id. This may be accessed through the | ||||||
| @@ -81,5 +91,5 @@ is an alias to :attr:`id`:: | |||||||
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|    If you define your own primary key field, the field implicitly becomes |    If you define your own primary key field, the field implicitly becomes | ||||||
|    required, so a :class:`ValidationError` will be thrown if you don't provide |    required, so a :class:`~mongoengine.ValidationError` will be thrown if | ||||||
|    it. |    you don't provide it. | ||||||
|   | |||||||
| @@ -65,7 +65,7 @@ Deleting stored files is achieved with the :func:`delete` method:: | |||||||
|  |  | ||||||
|     marmot.photo.delete() |     marmot.photo.delete() | ||||||
|  |  | ||||||
| .. note:: | .. warning:: | ||||||
|  |  | ||||||
|     The FileField in a Document actually only stores the ID of a file in a |     The FileField in a Document actually only stores the ID of a file in a | ||||||
|     separate GridFS collection. This means that deleting a document |     separate GridFS collection. This means that deleting a document | ||||||
|   | |||||||
| @@ -76,6 +76,7 @@ expressions: | |||||||
| * ``istartswith`` -- string field starts with value (case insensitive) | * ``istartswith`` -- string field starts with value (case insensitive) | ||||||
| * ``endswith`` -- string field ends with value | * ``endswith`` -- string field ends with value | ||||||
| * ``iendswith`` -- string field ends with value (case insensitive) | * ``iendswith`` -- string field ends with value (case insensitive) | ||||||
|  | * ``match``  -- performs an $elemMatch so you can match an entire document within an array | ||||||
|  |  | ||||||
| There are a few special operators for performing geographical queries, that | There are a few special operators for performing geographical queries, that | ||||||
| may be used with :class:`~mongoengine.GeoPointField`\ s: | may be used with :class:`~mongoengine.GeoPointField`\ s: | ||||||
| @@ -194,22 +195,6 @@ to be created:: | |||||||
|     >>> a.name == b.name and a.age == b.age |     >>> a.name == b.name and a.age == b.age | ||||||
|     True |     True | ||||||
|  |  | ||||||
| Dereferencing results |  | ||||||
| --------------------- |  | ||||||
| When iterating the results of :class:`~mongoengine.ListField` or |  | ||||||
| :class:`~mongoengine.DictField` we automatically dereference any |  | ||||||
| :class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the |  | ||||||
| number the queries to mongo. |  | ||||||
|  |  | ||||||
| There are times when that efficiency is not enough, documents that have |  | ||||||
| :class:`~mongoengine.ReferenceField` objects or |  | ||||||
| :class:`~mongoengine.GenericReferenceField` objects at the top level are |  | ||||||
| expensive as the number of queries to MongoDB can quickly rise. |  | ||||||
|  |  | ||||||
| To limit the number of queries use |  | ||||||
| :func:`~mongoengine.queryset.QuerySet.select_related` which converts the |  | ||||||
| QuerySet to a list and dereferences as efficiently as possible. |  | ||||||
|  |  | ||||||
| Default Document queries | Default Document queries | ||||||
| ======================== | ======================== | ||||||
| By default, the objects :attr:`~mongoengine.Document.objects` attribute on a | By default, the objects :attr:`~mongoengine.Document.objects` attribute on a | ||||||
| @@ -312,8 +297,16 @@ would be generating "tag-clouds":: | |||||||
|     from operator import itemgetter |     from operator import itemgetter | ||||||
|     top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] |     top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | Query efficiency and performance | ||||||
|  | ================================ | ||||||
|  |  | ||||||
|  | There are a couple of methods to improve efficiency when querying, reducing the | ||||||
|  | information returned by the query or efficient dereferencing. | ||||||
|  |  | ||||||
| Retrieving a subset of fields | Retrieving a subset of fields | ||||||
| ============================= | ----------------------------- | ||||||
|  |  | ||||||
| Sometimes a subset of fields on a :class:`~mongoengine.Document` is required, | Sometimes a subset of fields on a :class:`~mongoengine.Document` is required, | ||||||
| and for efficiency only these should be retrieved from the database. This issue | and for efficiency only these should be retrieved from the database. This issue | ||||||
| is especially important for MongoDB, as fields may often be extremely large | is especially important for MongoDB, as fields may often be extremely large | ||||||
| @@ -346,6 +339,27 @@ will be given:: | |||||||
| If you later need the missing fields, just call | If you later need the missing fields, just call | ||||||
| :meth:`~mongoengine.Document.reload` on your document. | :meth:`~mongoengine.Document.reload` on your document. | ||||||
|  |  | ||||||
|  | Getting related data | ||||||
|  | -------------------- | ||||||
|  |  | ||||||
|  | When iterating the results of :class:`~mongoengine.ListField` or | ||||||
|  | :class:`~mongoengine.DictField` we automatically dereference any | ||||||
|  | :class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the | ||||||
|  | number of queries to mongo. | ||||||
|  |  | ||||||
|  | There are times when that efficiency is not enough, documents that have | ||||||
|  | :class:`~mongoengine.ReferenceField` objects or | ||||||
|  | :class:`~mongoengine.GenericReferenceField` objects at the top level are | ||||||
|  | expensive as the number of queries to MongoDB can quickly rise. | ||||||
|  |  | ||||||
|  | To limit the number of queries use | ||||||
|  | :func:`~mongoengine.queryset.QuerySet.select_related` which converts the | ||||||
|  | QuerySet to a list and dereferences as efficiently as possible.  By default | ||||||
|  | :func:`~mongoengine.queryset.QuerySet.select_related` only dereferences any | ||||||
|  | references to the depth of 1 level.  If you have more complicated documents and | ||||||
|  | want to dereference more of the object at once then increasing the :attr:`max_depth` | ||||||
|  | will dereference more levels of the document. | ||||||
|  |  | ||||||
| Advanced queries | Advanced queries | ||||||
| ================ | ================ | ||||||
| Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword | Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword | ||||||
|   | |||||||
| @@ -5,11 +5,13 @@ Signals | |||||||
|  |  | ||||||
| .. versionadded:: 0.5 | .. versionadded:: 0.5 | ||||||
|  |  | ||||||
| Signal support is provided by the excellent `blinker`_ library and | .. note:: | ||||||
| will gracefully fall back if it is not available. |  | ||||||
|  |   Signal support is provided by the excellent `blinker`_ library and | ||||||
|  |   will gracefully fall back if it is not available. | ||||||
|  |  | ||||||
|  |  | ||||||
| The following document signals exist in MongoEngine and are pretty self explaintary: | The following document signals exist in MongoEngine and are pretty self-explanatory: | ||||||
|  |  | ||||||
|   * `mongoengine.signals.pre_init` |   * `mongoengine.signals.pre_init` | ||||||
|   * `mongoengine.signals.post_init` |   * `mongoengine.signals.post_init` | ||||||
| @@ -17,6 +19,8 @@ The following document signals exist in MongoEngine and are pretty self explaint | |||||||
|   * `mongoengine.signals.post_save` |   * `mongoengine.signals.post_save` | ||||||
|   * `mongoengine.signals.pre_delete` |   * `mongoengine.signals.pre_delete` | ||||||
|   * `mongoengine.signals.post_delete` |   * `mongoengine.signals.post_delete` | ||||||
|  |   * `mongoengine.signals.pre_bulk_insert` | ||||||
|  |   * `mongoengine.signals.post_bulk_insert` | ||||||
|  |  | ||||||
| Example usage:: | Example usage:: | ||||||
|  |  | ||||||
| @@ -42,8 +46,8 @@ Example usage:: | |||||||
|                 else: |                 else: | ||||||
|                     logging.debug("Updated") |                     logging.debug("Updated") | ||||||
|  |  | ||||||
|         signals.pre_save.connect(Author.pre_save, sender=Author) |     signals.pre_save.connect(Author.pre_save, sender=Author) | ||||||
|         signals.post_save.connect(Author.post_save, sender=Author) |     signals.post_save.connect(Author.post_save, sender=Author) | ||||||
|  |  | ||||||
|  |  | ||||||
| .. _blinker: http://pypi.python.org/pypi/blinker | .. _blinker: http://pypi.python.org/pypi/blinker | ||||||
|   | |||||||
| @@ -18,6 +18,9 @@ MongoDB. To install it, simply run | |||||||
| :doc:`apireference` | :doc:`apireference` | ||||||
|   The complete API documentation. |   The complete API documentation. | ||||||
|  |  | ||||||
|  | :doc:`upgrade` | ||||||
|  |   How to upgrade MongoEngine. | ||||||
|  |  | ||||||
| :doc:`django` | :doc:`django` | ||||||
|   Using MongoEngine and Django |   Using MongoEngine and Django | ||||||
|  |  | ||||||
| @@ -42,7 +45,8 @@ Also, you can join the developers' `mailing list | |||||||
|  |  | ||||||
| Changes | Changes | ||||||
| ------- | ------- | ||||||
| See the :doc:`changelog` for a full list of changes to MongoEngine. | See the :doc:`changelog` for a full list of changes to MongoEngine and | ||||||
|  | :doc:`upgrade` for upgrade information. | ||||||
|  |  | ||||||
| .. toctree:: | .. toctree:: | ||||||
|    :hidden: |    :hidden: | ||||||
|   | |||||||
| @@ -167,6 +167,11 @@ To delete all the posts if a user is deleted set the rule:: | |||||||
|  |  | ||||||
| See :class:`~mongoengine.ReferenceField` for more information. | See :class:`~mongoengine.ReferenceField` for more information. | ||||||
|  |  | ||||||
|  | .. note:: | ||||||
|  |     MapFields and DictFields currently don't support automatic handling of | ||||||
|  |     deleted references | ||||||
|  |  | ||||||
|  |  | ||||||
| Adding data to our Tumblelog | Adding data to our Tumblelog | ||||||
| ============================ | ============================ | ||||||
| Now that we've defined how our documents will be structured, let's start adding | Now that we've defined how our documents will be structured, let's start adding | ||||||
|   | |||||||
| @@ -2,6 +2,24 @@ | |||||||
| Upgrading | Upgrading | ||||||
| ========= | ========= | ||||||
|  |  | ||||||
|  | 0.5 to 0.6 | ||||||
|  | ========== | ||||||
|  |  | ||||||
|  | Embedded Documents - if you had a `pk` field you will have to rename it from `_id` | ||||||
|  | to `pk` as pk is no longer a property of Embedded Documents. | ||||||
|  |  | ||||||
|  | Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw | ||||||
|  | an InvalidDocument error as they aren't currently supported. | ||||||
|  |  | ||||||
|  | Document._get_subclasses - Is no longer used and the class method has been removed. | ||||||
|  |  | ||||||
|  | Document.objects.with_id - now raises an InvalidQueryError if used with a filter. | ||||||
|  |  | ||||||
|  | FutureWarning - A future warning has been added to all inherited classes that | ||||||
|  | don't define `allow_inheritance` in their meta. | ||||||
|  |  | ||||||
|  | You may need to update pyMongo to 2.0 for use with Sharding. | ||||||
|  |  | ||||||
| 0.4 to 0.5 | 0.4 to 0.5 | ||||||
| =========== | =========== | ||||||
|  |  | ||||||
| @@ -9,7 +27,7 @@ There have been the following backwards incompatibilities from 0.4 to 0.5.  The | |||||||
| main areas of change are: choices in fields, map_reduce and collection names. | main areas of change are: choices in fields, map_reduce and collection names. | ||||||
|  |  | ||||||
| Choice options: | Choice options: | ||||||
| -------------- | --------------- | ||||||
|  |  | ||||||
| Are now expected to be an iterable of tuples, with  the first element in each | Are now expected to be an iterable of tuples, with  the first element in each | ||||||
| tuple being the actual value to be stored. The second element is the | tuple being the actual value to be stored. The second element is the | ||||||
| @@ -58,7 +76,7 @@ To upgrade use a Mixin class to set meta like so :: | |||||||
|     class MyAceDocument(Document, BaseMixin): |     class MyAceDocument(Document, BaseMixin): | ||||||
|         pass |         pass | ||||||
|  |  | ||||||
|     MyAceDocument._get_collection_name() == myacedocument |     MyAceDocument._get_collection_name() == "myacedocument" | ||||||
|  |  | ||||||
| Alternatively, you can rename your collections eg :: | Alternatively, you can rename your collections eg :: | ||||||
|  |  | ||||||
|   | |||||||
| @@ -12,9 +12,7 @@ from signals import * | |||||||
| __all__ = (document.__all__ + fields.__all__ + connection.__all__ + | __all__ = (document.__all__ + fields.__all__ + connection.__all__ + | ||||||
|            queryset.__all__ + signals.__all__) |            queryset.__all__ + signals.__all__) | ||||||
|  |  | ||||||
| __author__ = 'Harry Marr' | VERSION = (0, 6, 11) | ||||||
|  |  | ||||||
| VERSION = (0, 5, 2) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_version(): | def get_version(): | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,82 +1,166 @@ | |||||||
| from pymongo import Connection | import pymongo | ||||||
| import multiprocessing | from pymongo import Connection, ReplicaSetConnection, uri_parser | ||||||
| import threading |  | ||||||
|  |  | ||||||
| __all__ = ['ConnectionError', 'connect'] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| _connection_defaults = { | __all__ = ['ConnectionError', 'connect', 'register_connection', | ||||||
|     'host': 'localhost', |            'DEFAULT_CONNECTION_NAME'] | ||||||
|     'port': 27017, |  | ||||||
| } |  | ||||||
| _connection = {} |  | ||||||
| _connection_settings = _connection_defaults.copy() |  | ||||||
|  |  | ||||||
| _db_name = None |  | ||||||
| _db_username = None | DEFAULT_CONNECTION_NAME = 'default' | ||||||
| _db_password = None |  | ||||||
| _db = {} |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ConnectionError(Exception): | class ConnectionError(Exception): | ||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| def _get_connection(reconnect=False): | _connection_settings = {} | ||||||
|     """Handles the connection to the database | _connections = {} | ||||||
|  | _dbs = {} | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def register_connection(alias, name, host='localhost', port=27017, | ||||||
|  |                         is_slave=False, read_preference=False, slaves=None, | ||||||
|  |                         username=None, password=None, **kwargs): | ||||||
|  |     """Add a connection. | ||||||
|  |  | ||||||
|  |     :param alias: the name that will be used to refer to this connection | ||||||
|  |         throughout MongoEngine | ||||||
|  |     :param name: the name of the specific database to use | ||||||
|  |     :param host: the host name of the :program:`mongod` instance to connect to | ||||||
|  |     :param port: the port that the :program:`mongod` instance is running on | ||||||
|  |     :param is_slave: whether the connection can act as a slave ** Depreciated pymongo 2.0.1+ | ||||||
|  |     :param read_preference: The read preference for the collection ** Added pymongo 2.1 | ||||||
|  |     :param slaves: a list of aliases of slave connections; each of these must | ||||||
|  |         be a registered connection that has :attr:`is_slave` set to ``True`` | ||||||
|  |     :param username: username to authenticate with | ||||||
|  |     :param password: password to authenticate with | ||||||
|  |     :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver | ||||||
|  |  | ||||||
|     """ |     """ | ||||||
|     global _connection |     global _connection_settings | ||||||
|     identity = get_identity() |  | ||||||
|  |     conn_settings = { | ||||||
|  |         'name': name, | ||||||
|  |         'host': host, | ||||||
|  |         'port': port, | ||||||
|  |         'is_slave': is_slave, | ||||||
|  |         'slaves': slaves or [], | ||||||
|  |         'username': username, | ||||||
|  |         'password': password, | ||||||
|  |         'read_preference': read_preference | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     # Handle uri style connections | ||||||
|  |     if "://" in host: | ||||||
|  |         uri_dict = uri_parser.parse_uri(host) | ||||||
|  |         if uri_dict.get('database') is None: | ||||||
|  |             raise ConnectionError("If using URI style connection include "\ | ||||||
|  |                                   "database name in string") | ||||||
|  |         conn_settings.update({ | ||||||
|  |             'host': host, | ||||||
|  |             'name': uri_dict.get('database'), | ||||||
|  |             'username': uri_dict.get('username'), | ||||||
|  |             'password': uri_dict.get('password'), | ||||||
|  |             'read_preference': read_preference, | ||||||
|  |         }) | ||||||
|  |         if "replicaSet" in host: | ||||||
|  |             conn_settings['replicaSet'] = True | ||||||
|  |  | ||||||
|  |     conn_settings.update(kwargs) | ||||||
|  |     _connection_settings[alias] = conn_settings | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def disconnect(alias=DEFAULT_CONNECTION_NAME): | ||||||
|  |     global _connections | ||||||
|  |     global _dbs | ||||||
|  |  | ||||||
|  |     if alias in _connections: | ||||||
|  |         get_connection(alias=alias).disconnect() | ||||||
|  |         del _connections[alias] | ||||||
|  |     if alias in _dbs: | ||||||
|  |         del _dbs[alias] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||||
|  |     global _connections | ||||||
|     # Connect to the database if not already connected |     # Connect to the database if not already connected | ||||||
|     if _connection.get(identity) is None or reconnect: |     if reconnect: | ||||||
|  |         disconnect(alias) | ||||||
|  |  | ||||||
|  |     if alias not in _connections: | ||||||
|  |         if alias not in _connection_settings: | ||||||
|  |             msg = 'Connection with alias "%s" has not been defined' % alias | ||||||
|  |             if alias == DEFAULT_CONNECTION_NAME: | ||||||
|  |                 msg = 'You have not defined a default connection' | ||||||
|  |             raise ConnectionError(msg) | ||||||
|  |         conn_settings = _connection_settings[alias].copy() | ||||||
|  |  | ||||||
|  |         if hasattr(pymongo, 'version_tuple'):  # Support for 2.1+ | ||||||
|  |             conn_settings.pop('name', None) | ||||||
|  |             conn_settings.pop('slaves', None) | ||||||
|  |             conn_settings.pop('is_slave', None) | ||||||
|  |             conn_settings.pop('username', None) | ||||||
|  |             conn_settings.pop('password', None) | ||||||
|  |         else: | ||||||
|  |             # Get all the slave connections | ||||||
|  |             if 'slaves' in conn_settings: | ||||||
|  |                 slaves = [] | ||||||
|  |                 for slave_alias in conn_settings['slaves']: | ||||||
|  |                     slaves.append(get_connection(slave_alias)) | ||||||
|  |                 conn_settings['slaves'] = slaves | ||||||
|  |                 conn_settings.pop('read_preference', None) | ||||||
|  |  | ||||||
|  |         connection_class = Connection | ||||||
|  |         if 'replicaSet' in conn_settings: | ||||||
|  |             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) | ||||||
|  |             # Discard port since it can't be used on ReplicaSetConnection | ||||||
|  |             conn_settings.pop('port', None) | ||||||
|  |             # Discard replicaSet if not base string | ||||||
|  |             if not isinstance(conn_settings['replicaSet'], basestring): | ||||||
|  |                 conn_settings.pop('replicaSet', None) | ||||||
|  |             connection_class = ReplicaSetConnection | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             _connection[identity] = Connection(**_connection_settings) |             _connections[alias] = connection_class(**conn_settings) | ||||||
|         except Exception, e: |         except Exception, e: | ||||||
|             raise ConnectionError("Cannot connect to the database:\n%s" % e) |             raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e)) | ||||||
|     return _connection[identity] |     return _connections[alias] | ||||||
|  |  | ||||||
| def _get_db(reconnect=False): |  | ||||||
|     """Handles database connections and authentication based on the current | def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): | ||||||
|     identity |     global _dbs | ||||||
|  |     if reconnect: | ||||||
|  |         disconnect(alias) | ||||||
|  |  | ||||||
|  |     if alias not in _dbs: | ||||||
|  |         conn = get_connection(alias) | ||||||
|  |         conn_settings = _connection_settings[alias] | ||||||
|  |         _dbs[alias] = conn[conn_settings['name']] | ||||||
|  |         # Authenticate if necessary | ||||||
|  |         if conn_settings['username'] and conn_settings['password']: | ||||||
|  |             _dbs[alias].authenticate(conn_settings['username'], | ||||||
|  |                                      conn_settings['password']) | ||||||
|  |     return _dbs[alias] | ||||||
|  |  | ||||||
|  |  | ||||||
|  | def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs): | ||||||
|  |     """Connect to the database specified by the 'db' argument. | ||||||
|  |  | ||||||
|  |     Connection settings may be provided here as well if the database is not | ||||||
|  |     running on the default port on localhost. If authentication is needed, | ||||||
|  |     provide username and password arguments as well. | ||||||
|  |  | ||||||
|  |     Multiple databases are supported by using aliases.  Provide a separate | ||||||
|  |     `alias` to connect to a different instance of :program:`mongod`. | ||||||
|  |  | ||||||
|  |     .. versionchanged:: 0.6 - added multiple database support. | ||||||
|     """ |     """ | ||||||
|     global _db, _connection |     global _connections | ||||||
|     identity = get_identity() |     if alias not in _connections: | ||||||
|     # Connect if not already connected |         register_connection(alias, db, **kwargs) | ||||||
|     if _connection.get(identity) is None or reconnect: |  | ||||||
|         _connection[identity] = _get_connection(reconnect=reconnect) |  | ||||||
|  |  | ||||||
|     if _db.get(identity) is None or reconnect: |     return get_connection(alias) | ||||||
|         # _db_name will be None if the user hasn't called connect() |  | ||||||
|         if _db_name is None: |  | ||||||
|             raise ConnectionError('Not connected to the database') |  | ||||||
|  |  | ||||||
|         # Get DB from current connection and authenticate if necessary |  | ||||||
|         _db[identity] = _connection[identity][_db_name] |  | ||||||
|         if _db_username and _db_password: |  | ||||||
|             _db[identity].authenticate(_db_username, _db_password) |  | ||||||
|  |  | ||||||
|     return _db[identity] |  | ||||||
|  |  | ||||||
| def get_identity(): |  | ||||||
|     """Creates an identity key based on the current process and thread |  | ||||||
|     identity. |  | ||||||
|     """ |  | ||||||
|     identity = multiprocessing.current_process()._identity |  | ||||||
|     identity = 0 if not identity else identity[0] |  | ||||||
|  |  | ||||||
|     identity = (identity, threading.current_thread().ident) |  | ||||||
|     return identity |  | ||||||
|  |  | ||||||
| def connect(db, username=None, password=None, **kwargs): |  | ||||||
|     """Connect to the database specified by the 'db' argument. Connection |  | ||||||
|     settings may be provided here as well if the database is not running on |  | ||||||
|     the default port on localhost. If authentication is needed, provide |  | ||||||
|     username and password arguments as well. |  | ||||||
|     """ |  | ||||||
|     global _connection_settings, _db_name, _db_username, _db_password, _db |  | ||||||
|     _connection_settings = dict(_connection_defaults, **kwargs) |  | ||||||
|     _db_name = db |  | ||||||
|     _db_username = username |  | ||||||
|     _db_password = password |  | ||||||
|     return _get_db(reconnect=True) |  | ||||||
|  |  | ||||||
|  | # Support old naming convention | ||||||
|  | _get_connection = get_connection | ||||||
|  | _get_db = get_db | ||||||
|   | |||||||
| @@ -1,17 +1,15 @@ | |||||||
| import operator | from bson import DBRef, SON | ||||||
|  |  | ||||||
| import pymongo | from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document) | ||||||
|  | from fields import (ReferenceField, ListField, DictField, MapField) | ||||||
| from base import BaseDict, BaseList, get_document, TopLevelDocumentMetaclass | from connection import get_db | ||||||
| from fields import ReferenceField |  | ||||||
| from connection import _get_db |  | ||||||
| from queryset import QuerySet | from queryset import QuerySet | ||||||
| from document import Document | from document import Document | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeReference(object): | class DeReference(object): | ||||||
|  |  | ||||||
|     def __call__(self, items, max_depth=1, instance=None, name=None, get=False): |     def __call__(self, items, max_depth=1, instance=None, name=None): | ||||||
|         """ |         """ | ||||||
|         Cheaply dereferences the items to a set depth. |         Cheaply dereferences the items to a set depth. | ||||||
|         Also handles the convertion of complex data types. |         Also handles the convertion of complex data types. | ||||||
| @@ -45,7 +43,7 @@ class DeReference(object): | |||||||
|  |  | ||||||
|         self.reference_map = self._find_references(items) |         self.reference_map = self._find_references(items) | ||||||
|         self.object_map = self._fetch_objects(doc_type=doc_type) |         self.object_map = self._fetch_objects(doc_type=doc_type) | ||||||
|         return self._attach_objects(items, 0, instance, name, get) |         return self._attach_objects(items, 0, instance, name) | ||||||
|  |  | ||||||
|     def _find_references(self, items, depth=0): |     def _find_references(self, items, depth=0): | ||||||
|         """ |         """ | ||||||
| @@ -55,7 +53,7 @@ class DeReference(object): | |||||||
|         :param depth: The current depth of recursion |         :param depth: The current depth of recursion | ||||||
|         """ |         """ | ||||||
|         reference_map = {} |         reference_map = {} | ||||||
|         if not items: |         if not items or depth >= self.max_depth: | ||||||
|             return reference_map |             return reference_map | ||||||
|  |  | ||||||
|         # Determine the iterator to use |         # Determine the iterator to use | ||||||
| @@ -65,13 +63,14 @@ class DeReference(object): | |||||||
|             iterator = items.iteritems() |             iterator = items.iteritems() | ||||||
|  |  | ||||||
|         # Recursively find dbreferences |         # Recursively find dbreferences | ||||||
|  |         depth += 1 | ||||||
|         for k, item in iterator: |         for k, item in iterator: | ||||||
|             if hasattr(item, '_fields'): |             if hasattr(item, '_fields'): | ||||||
|                 for field_name, field in item._fields.iteritems(): |                 for field_name, field in item._fields.iteritems(): | ||||||
|                     v = item._data.get(field_name, None) |                     v = item._data.get(field_name, None) | ||||||
|                     if isinstance(v, (pymongo.dbref.DBRef)): |                     if isinstance(v, (DBRef)): | ||||||
|                         reference_map.setdefault(field.document_type, []).append(v.id) |                         reference_map.setdefault(field.document_type, []).append(v.id) | ||||||
|                     elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v: |                     elif isinstance(v, (dict, SON)) and '_ref' in v: | ||||||
|                         reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id) |                         reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id) | ||||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: |                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None) |                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None) | ||||||
| @@ -80,15 +79,15 @@ class DeReference(object): | |||||||
|                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): |                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): | ||||||
|                                 key = field_cls |                                 key = field_cls | ||||||
|                             reference_map.setdefault(key, []).extend(refs) |                             reference_map.setdefault(key, []).extend(refs) | ||||||
|             elif isinstance(item, (pymongo.dbref.DBRef)): |             elif isinstance(item, (DBRef)): | ||||||
|                 reference_map.setdefault(item.collection, []).append(item.id) |                 reference_map.setdefault(item.collection, []).append(item.id) | ||||||
|             elif isinstance(item, (dict, pymongo.son.SON)) and '_ref' in item: |             elif isinstance(item, (dict, SON)) and '_ref' in item: | ||||||
|                 reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id) |                 reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id) | ||||||
|             elif isinstance(item, (dict, list, tuple)) and depth <= self.max_depth: |             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: | ||||||
|                 references = self._find_references(item, depth) |                 references = self._find_references(item, depth - 1) | ||||||
|                 for key, refs in references.iteritems(): |                 for key, refs in references.iteritems(): | ||||||
|                     reference_map.setdefault(key, []).extend(refs) |                     reference_map.setdefault(key, []).extend(refs) | ||||||
|         depth += 1 |  | ||||||
|         return reference_map |         return reference_map | ||||||
|  |  | ||||||
|     def _fetch_objects(self, doc_type=None): |     def _fetch_objects(self, doc_type=None): | ||||||
| @@ -103,16 +102,26 @@ class DeReference(object): | |||||||
|                 for key, doc in references.iteritems(): |                 for key, doc in references.iteritems(): | ||||||
|                     object_map[key] = doc |                     object_map[key] = doc | ||||||
|             else:  # Generic reference: use the refs data to convert to document |             else:  # Generic reference: use the refs data to convert to document | ||||||
|                 references = _get_db()[col].find({'_id': {'$in': refs}}) |                 if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,) ): | ||||||
|                 for ref in references: |                     references = doc_type._get_db()[col].find({'_id': {'$in': refs}}) | ||||||
|                     if '_cls' in ref: |                     for ref in references: | ||||||
|                         doc = get_document(ref['_cls'])._from_son(ref) |  | ||||||
|                     else: |  | ||||||
|                         doc = doc_type._from_son(ref) |                         doc = doc_type._from_son(ref) | ||||||
|                     object_map[doc.id] = doc |                         object_map[doc.id] = doc | ||||||
|  |                 else: | ||||||
|  |                     references = get_db()[col].find({'_id': {'$in': refs}}) | ||||||
|  |                     for ref in references: | ||||||
|  |                         if '_cls' in ref: | ||||||
|  |                             doc = get_document(ref["_cls"])._from_son(ref) | ||||||
|  |                         elif doc_type is None: | ||||||
|  |                             doc = get_document( | ||||||
|  |                                 ''.join(x.capitalize() | ||||||
|  |                                         for x in col.split('_')))._from_son(ref) | ||||||
|  |                         else: | ||||||
|  |                             doc = doc_type._from_son(ref) | ||||||
|  |                         object_map[doc.id] = doc | ||||||
|         return object_map |         return object_map | ||||||
|  |  | ||||||
|     def _attach_objects(self, items, depth=0, instance=None, name=None, get=False): |     def _attach_objects(self, items, depth=0, instance=None, name=None): | ||||||
|         """ |         """ | ||||||
|         Recursively finds all db references to be dereferenced |         Recursively finds all db references to be dereferenced | ||||||
|  |  | ||||||
| @@ -122,7 +131,6 @@ class DeReference(object): | |||||||
|             :class:`~mongoengine.base.ComplexBaseField` |             :class:`~mongoengine.base.ComplexBaseField` | ||||||
|         :param name: The name of the field, used for tracking changes by |         :param name: The name of the field, used for tracking changes by | ||||||
|             :class:`~mongoengine.base.ComplexBaseField` |             :class:`~mongoengine.base.ComplexBaseField` | ||||||
|         :param get: A boolean determining if being called by __get__ |  | ||||||
|         """ |         """ | ||||||
|         if not items: |         if not items: | ||||||
|             if isinstance(items, (BaseDict, BaseList)): |             if isinstance(items, (BaseDict, BaseList)): | ||||||
| @@ -130,17 +138,16 @@ class DeReference(object): | |||||||
|  |  | ||||||
|             if instance: |             if instance: | ||||||
|                 if isinstance(items, dict): |                 if isinstance(items, dict): | ||||||
|                     return BaseDict(items, instance=instance, name=name) |                     return BaseDict(items, instance, name) | ||||||
|                 else: |                 else: | ||||||
|                     return BaseList(items, instance=instance, name=name) |                     return BaseList(items, instance, name) | ||||||
|  |  | ||||||
|         if isinstance(items, (dict, pymongo.son.SON)): |         if isinstance(items, (dict, SON)): | ||||||
|             if '_ref' in items: |             if '_ref' in items: | ||||||
|                 return self.object_map.get(items['_ref'].id, items) |                 return self.object_map.get(items['_ref'].id, items) | ||||||
|             elif '_types' in items and '_cls' in items: |             elif '_types' in items and '_cls' in items: | ||||||
|                 doc = get_document(items['_cls'])._from_son(items) |                 doc = get_document(items['_cls'])._from_son(items) | ||||||
|                 if not get: |                 doc._data = self._attach_objects(doc._data, depth, doc, name) | ||||||
|                     doc._data = self._attach_objects(doc._data, depth, doc, name, get) |  | ||||||
|                 return doc |                 return doc | ||||||
|  |  | ||||||
|         if not hasattr(items, 'items'): |         if not hasattr(items, 'items'): | ||||||
| @@ -152,6 +159,7 @@ class DeReference(object): | |||||||
|             iterator = items.iteritems() |             iterator = items.iteritems() | ||||||
|             data = {} |             data = {} | ||||||
|  |  | ||||||
|  |         depth += 1 | ||||||
|         for k, v in iterator: |         for k, v in iterator: | ||||||
|             if is_list: |             if is_list: | ||||||
|                 data.append(v) |                 data.append(v) | ||||||
| @@ -163,24 +171,22 @@ class DeReference(object): | |||||||
|             elif hasattr(v, '_fields'): |             elif hasattr(v, '_fields'): | ||||||
|                 for field_name, field in v._fields.iteritems(): |                 for field_name, field in v._fields.iteritems(): | ||||||
|                     v = data[k]._data.get(field_name, None) |                     v = data[k]._data.get(field_name, None) | ||||||
|                     if isinstance(v, (pymongo.dbref.DBRef)): |                     if isinstance(v, (DBRef)): | ||||||
|                         data[k]._data[field_name] = self.object_map.get(v.id, v) |                         data[k]._data[field_name] = self.object_map.get(v.id, v) | ||||||
|                     elif isinstance(v, (dict, pymongo.son.SON)) and '_ref' in v: |                     elif isinstance(v, (dict, SON)) and '_ref' in v: | ||||||
|                         data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v) |                         data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v) | ||||||
|                     elif isinstance(v, dict) and depth < self.max_depth: |                     elif isinstance(v, dict) and depth <= self.max_depth: | ||||||
|                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name, get=get) |                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) | ||||||
|                     elif isinstance(v, (list, tuple)): |                     elif isinstance(v, (list, tuple)) and depth <= self.max_depth: | ||||||
|                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name, get=get) |                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) | ||||||
|             elif isinstance(v, (dict, list, tuple)) and depth < self.max_depth: |             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: | ||||||
|                 data[k] = self._attach_objects(v, depth, instance=instance, name=name, get=get) |                 data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name) | ||||||
|             elif hasattr(v, 'id'): |             elif hasattr(v, 'id'): | ||||||
|                 data[k] = self.object_map.get(v.id, v) |                 data[k] = self.object_map.get(v.id, v) | ||||||
|  |  | ||||||
|         if instance and name: |         if instance and name: | ||||||
|             if is_list: |             if is_list: | ||||||
|                 return BaseList(data, instance=instance, name=name) |                 return BaseList(data, instance, name) | ||||||
|             return BaseDict(data, instance=instance, name=name) |             return BaseDict(data, instance, name) | ||||||
|         depth += 1 |         depth += 1 | ||||||
|         return data |         return data | ||||||
|  |  | ||||||
| dereference = DeReference() |  | ||||||
|   | |||||||
| @@ -1,23 +1,39 @@ | |||||||
|  | import datetime | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
| from django.utils.hashcompat import md5_constructor, sha_constructor |  | ||||||
| from django.utils.encoding import smart_str | from django.utils.encoding import smart_str | ||||||
| from django.contrib.auth.models import AnonymousUser | from django.contrib.auth.models import AnonymousUser | ||||||
| from django.utils.translation import ugettext_lazy as _ | from django.utils.translation import ugettext_lazy as _ | ||||||
|  |  | ||||||
| import datetime | try: | ||||||
|  |     from django.contrib.auth.hashers import check_password, make_password | ||||||
|  | except ImportError: | ||||||
|  |     """Handle older versions of Django""" | ||||||
|  |     from django.utils.hashcompat import md5_constructor, sha_constructor | ||||||
|  |  | ||||||
|  |     def get_hexdigest(algorithm, salt, raw_password): | ||||||
|  |         raw_password, salt = smart_str(raw_password), smart_str(salt) | ||||||
|  |         if algorithm == 'md5': | ||||||
|  |             return md5_constructor(salt + raw_password).hexdigest() | ||||||
|  |         elif algorithm == 'sha1': | ||||||
|  |             return sha_constructor(salt + raw_password).hexdigest() | ||||||
|  |         raise ValueError('Got unknown password algorithm type in password') | ||||||
|  |  | ||||||
|  |     def check_password(raw_password, password): | ||||||
|  |         algo, salt, hash = password.split('$') | ||||||
|  |         return hash == get_hexdigest(algo, salt, raw_password) | ||||||
|  |  | ||||||
|  |     def make_password(raw_password): | ||||||
|  |         from random import random | ||||||
|  |         algo = 'sha1' | ||||||
|  |         salt = get_hexdigest(algo, str(random()), str(random()))[:5] | ||||||
|  |         hash = get_hexdigest(algo, salt, raw_password) | ||||||
|  |         return '%s$%s$%s' % (algo, salt, hash) | ||||||
|  |  | ||||||
|  |  | ||||||
| REDIRECT_FIELD_NAME = 'next' | REDIRECT_FIELD_NAME = 'next' | ||||||
|  |  | ||||||
| def get_hexdigest(algorithm, salt, raw_password): |  | ||||||
|     raw_password, salt = smart_str(raw_password), smart_str(salt) |  | ||||||
|     if algorithm == 'md5': |  | ||||||
|         return md5_constructor(salt + raw_password).hexdigest() |  | ||||||
|     elif algorithm == 'sha1': |  | ||||||
|         return sha_constructor(salt + raw_password).hexdigest() |  | ||||||
|     raise ValueError('Got unknown password algorithm type in password') |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class User(Document): | class User(Document): | ||||||
|     """A User document that aims to mirror most of the API specified by Django |     """A User document that aims to mirror most of the API specified by Django | ||||||
|     at http://docs.djangoproject.com/en/dev/topics/auth/#users |     at http://docs.djangoproject.com/en/dev/topics/auth/#users | ||||||
| @@ -34,7 +50,7 @@ class User(Document): | |||||||
|     email = EmailField(verbose_name=_('e-mail address')) |     email = EmailField(verbose_name=_('e-mail address')) | ||||||
|     password = StringField(max_length=128, |     password = StringField(max_length=128, | ||||||
|                            verbose_name=_('password'), |                            verbose_name=_('password'), | ||||||
|                            help_text=_("Use '[algo]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>.")) |                            help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>.")) | ||||||
|     is_staff = BooleanField(default=False, |     is_staff = BooleanField(default=False, | ||||||
|                             verbose_name=_('staff status'), |                             verbose_name=_('staff status'), | ||||||
|                             help_text=_("Designates whether the user can log into this admin site.")) |                             help_text=_("Designates whether the user can log into this admin site.")) | ||||||
| @@ -50,6 +66,7 @@ class User(Document): | |||||||
|                                 verbose_name=_('date joined')) |                                 verbose_name=_('date joined')) | ||||||
|  |  | ||||||
|     meta = { |     meta = { | ||||||
|  |         'allow_inheritance': True, | ||||||
|         'indexes': [ |         'indexes': [ | ||||||
|             {'fields': ['username'], 'unique': True} |             {'fields': ['username'], 'unique': True} | ||||||
|         ] |         ] | ||||||
| @@ -75,11 +92,7 @@ class User(Document): | |||||||
|         assigning to :attr:`~mongoengine.django.auth.User.password` as the |         assigning to :attr:`~mongoengine.django.auth.User.password` as the | ||||||
|         password is hashed before storage. |         password is hashed before storage. | ||||||
|         """ |         """ | ||||||
|         from random import random |         self.password = make_password(raw_password) | ||||||
|         algo = 'sha1' |  | ||||||
|         salt = get_hexdigest(algo, str(random()), str(random()))[:5] |  | ||||||
|         hash = get_hexdigest(algo, salt, raw_password) |  | ||||||
|         self.password = '%s$%s$%s' % (algo, salt, hash) |  | ||||||
|         self.save() |         self.save() | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
| @@ -89,8 +102,7 @@ class User(Document): | |||||||
|         :attr:`~mongoengine.django.auth.User.password` as the password is |         :attr:`~mongoengine.django.auth.User.password` as the password is | ||||||
|         hashed before storage. |         hashed before storage. | ||||||
|         """ |         """ | ||||||
|         algo, salt, hash = self.password.split('$') |         return check_password(raw_password, self.password) | ||||||
|         return hash == get_hexdigest(algo, salt, raw_password) |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def create_user(cls, username, password, email=None): |     def create_user(cls, username, password, email=None): | ||||||
|   | |||||||
| @@ -1,3 +1,6 @@ | |||||||
|  | from datetime import datetime | ||||||
|  |  | ||||||
|  | from django.conf import settings | ||||||
| from django.contrib.sessions.backends.base import SessionBase, CreateError | from django.contrib.sessions.backends.base import SessionBase, CreateError | ||||||
| from django.core.exceptions import SuspiciousOperation | from django.core.exceptions import SuspiciousOperation | ||||||
| from django.utils.encoding import force_unicode | from django.utils.encoding import force_unicode | ||||||
| @@ -5,8 +8,12 @@ from django.utils.encoding import force_unicode | |||||||
| from mongoengine.document import Document | from mongoengine.document import Document | ||||||
| from mongoengine import fields | from mongoengine import fields | ||||||
| from mongoengine.queryset import OperationError | from mongoengine.queryset import OperationError | ||||||
|  | from mongoengine.connection import DEFAULT_CONNECTION_NAME | ||||||
|  |  | ||||||
| from datetime import datetime |  | ||||||
|  | MONGOENGINE_SESSION_DB_ALIAS = getattr( | ||||||
|  |     settings, 'MONGOENGINE_SESSION_DB_ALIAS', | ||||||
|  |     DEFAULT_CONNECTION_NAME) | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoSession(Document): | class MongoSession(Document): | ||||||
| @@ -14,7 +21,9 @@ class MongoSession(Document): | |||||||
|     session_data = fields.StringField() |     session_data = fields.StringField() | ||||||
|     expire_date = fields.DateTimeField() |     expire_date = fields.DateTimeField() | ||||||
|  |  | ||||||
|     meta = {'collection': 'django_session', 'allow_inheritance': False} |     meta = {'collection': 'django_session', | ||||||
|  |             'db_alias': MONGOENGINE_SESSION_DB_ALIAS, | ||||||
|  |             'allow_inheritance': False} | ||||||
|  |  | ||||||
|  |  | ||||||
| class SessionStore(SessionBase): | class SessionStore(SessionBase): | ||||||
| @@ -35,7 +44,7 @@ class SessionStore(SessionBase): | |||||||
|  |  | ||||||
|     def create(self): |     def create(self): | ||||||
|         while True: |         while True: | ||||||
|             self.session_key = self._get_new_session_key() |             self._session_key = self._get_new_session_key() | ||||||
|             try: |             try: | ||||||
|                 self.save(must_create=True) |                 self.save(must_create=True) | ||||||
|             except CreateError: |             except CreateError: | ||||||
| @@ -45,6 +54,8 @@ class SessionStore(SessionBase): | |||||||
|             return |             return | ||||||
|  |  | ||||||
|     def save(self, must_create=False): |     def save(self, must_create=False): | ||||||
|  |         if self.session_key is None: | ||||||
|  |             self._session_key = self._get_new_session_key() | ||||||
|         s = MongoSession(session_key=self.session_key) |         s = MongoSession(session_key=self.session_key) | ||||||
|         s.session_data = self.encode(self._get_session(no_load=must_create)) |         s.session_data = self.encode(self._get_session(no_load=must_create)) | ||||||
|         s.expire_date = self.get_expiry_date() |         s.expire_date = self.get_expiry_date() | ||||||
|   | |||||||
| @@ -10,7 +10,7 @@ class MongoTestCase(TestCase): | |||||||
|     """ |     """ | ||||||
|     db_name = 'test_%s' % settings.MONGO_DATABASE_NAME |     db_name = 'test_%s' % settings.MONGO_DATABASE_NAME | ||||||
|     def __init__(self, methodName='runtest'): |     def __init__(self, methodName='runtest'): | ||||||
|         self.db = connect(self.db_name) |         self.db = connect(self.db_name).get_db() | ||||||
|         super(MongoTestCase, self).__init__(methodName) |         super(MongoTestCase, self).__init__(methodName) | ||||||
|  |  | ||||||
|     def _post_teardown(self): |     def _post_teardown(self): | ||||||
|   | |||||||
| @@ -1,13 +1,15 @@ | |||||||
| from mongoengine import signals |  | ||||||
| from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, |  | ||||||
|                   ValidationError, BaseDict, BaseList) |  | ||||||
| from queryset import OperationError |  | ||||||
| from connection import _get_db |  | ||||||
|  |  | ||||||
| import pymongo | import pymongo | ||||||
|  |  | ||||||
| __all__ = ['Document', 'EmbeddedDocument', 'ValidationError', | from bson.dbref import DBRef | ||||||
|            'OperationError', 'InvalidCollectionError'] |  | ||||||
|  | from mongoengine import signals | ||||||
|  | from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, | ||||||
|  |                   BaseDict, BaseList) | ||||||
|  | from queryset import OperationError | ||||||
|  | from connection import get_db, DEFAULT_CONNECTION_NAME | ||||||
|  |  | ||||||
|  | __all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument', | ||||||
|  |            'DynamicEmbeddedDocument', 'OperationError', 'InvalidCollectionError'] | ||||||
|  |  | ||||||
|  |  | ||||||
| class InvalidCollectionError(Exception): | class InvalidCollectionError(Exception): | ||||||
| @@ -23,6 +25,10 @@ class EmbeddedDocument(BaseDocument): | |||||||
|  |  | ||||||
|     __metaclass__ = DocumentMetaclass |     __metaclass__ = DocumentMetaclass | ||||||
|  |  | ||||||
|  |     def __init__(self, *args, **kwargs): | ||||||
|  |         super(EmbeddedDocument, self).__init__(*args, **kwargs) | ||||||
|  |         self._changed_fields = [] | ||||||
|  |  | ||||||
|     def __delattr__(self, *args, **kwargs): |     def __delattr__(self, *args, **kwargs): | ||||||
|         """Handle deletions of fields""" |         """Handle deletions of fields""" | ||||||
|         field_name = args[0] |         field_name = args[0] | ||||||
| @@ -34,6 +40,10 @@ class EmbeddedDocument(BaseDocument): | |||||||
|         else: |         else: | ||||||
|             super(EmbeddedDocument, self).__delattr__(*args, **kwargs) |             super(EmbeddedDocument, self).__delattr__(*args, **kwargs) | ||||||
|  |  | ||||||
|  |     def __eq__(self, other): | ||||||
|  |         if isinstance(other, self.__class__): | ||||||
|  |             return self._data == other._data | ||||||
|  |         return False | ||||||
|  |  | ||||||
|  |  | ||||||
| class Document(BaseDocument): | class Document(BaseDocument): | ||||||
| @@ -70,6 +80,12 @@ class Document(BaseDocument): | |||||||
|     names. Index direction may be specified by prefixing the field names with |     names. Index direction may be specified by prefixing the field names with | ||||||
|     a **+** or **-** sign. |     a **+** or **-** sign. | ||||||
|  |  | ||||||
|  |     Automatic index creation can be disabled by specifying | ||||||
|  |     attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to | ||||||
|  |     False then indexes will not be created by MongoEngine.  This is useful in | ||||||
|  |     production systems where index creation is performed as part of a deployment | ||||||
|  |     system. | ||||||
|  |  | ||||||
|     By default, _types will be added to the start of every index (that |     By default, _types will be added to the start of every index (that | ||||||
|     doesn't contain a list) if allow_inheritence is True. This can be |     doesn't contain a list) if allow_inheritence is True. This can be | ||||||
|     disabled by either setting types to False on the specific index or |     disabled by either setting types to False on the specific index or | ||||||
| @@ -77,42 +93,57 @@ class Document(BaseDocument): | |||||||
|     """ |     """ | ||||||
|     __metaclass__ = TopLevelDocumentMetaclass |     __metaclass__ = TopLevelDocumentMetaclass | ||||||
|  |  | ||||||
|     @classmethod |     @apply | ||||||
|     def _get_collection(self): |     def pk(): | ||||||
|         """Returns the collection for the document.""" |         """Primary key alias | ||||||
|         db = _get_db() |         """ | ||||||
|         collection_name = self._get_collection_name() |         def fget(self): | ||||||
|  |             return getattr(self, self._meta['id_field']) | ||||||
|  |         def fset(self, value): | ||||||
|  |             return setattr(self, self._meta['id_field'], value) | ||||||
|  |         return property(fget, fset) | ||||||
|  |  | ||||||
|         if not hasattr(self, '_collection') or self._collection is None: |     @classmethod | ||||||
|  |     def _get_db(cls): | ||||||
|  |         """Some Model using other db_alias""" | ||||||
|  |         return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME )) | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def _get_collection(cls): | ||||||
|  |         """Returns the collection for the document.""" | ||||||
|  |         if not hasattr(cls, '_collection') or cls._collection is None: | ||||||
|  |             db = cls._get_db() | ||||||
|  |             collection_name = cls._get_collection_name() | ||||||
|             # Create collection as a capped collection if specified |             # Create collection as a capped collection if specified | ||||||
|             if self._meta['max_size'] or self._meta['max_documents']: |             if cls._meta['max_size'] or cls._meta['max_documents']: | ||||||
|                 # Get max document limit and max byte size from meta |                 # Get max document limit and max byte size from meta | ||||||
|                 max_size = self._meta['max_size'] or 10000000  # 10MB default |                 max_size = cls._meta['max_size'] or 10000000  # 10MB default | ||||||
|                 max_documents = self._meta['max_documents'] |                 max_documents = cls._meta['max_documents'] | ||||||
|  |  | ||||||
|                 if collection_name in db.collection_names(): |                 if collection_name in db.collection_names(): | ||||||
|                     self._collection = db[collection_name] |                     cls._collection = db[collection_name] | ||||||
|                     # The collection already exists, check if its capped |                     # The collection already exists, check if its capped | ||||||
|                     # options match the specified capped options |                     # options match the specified capped options | ||||||
|                     options = self._collection.options() |                     options = cls._collection.options() | ||||||
|                     if options.get('max') != max_documents or \ |                     if options.get('max') != max_documents or \ | ||||||
|                        options.get('size') != max_size: |                        options.get('size') != max_size: | ||||||
|                         msg = ('Cannot create collection "%s" as a capped ' |                         msg = ('Cannot create collection "%s" as a capped ' | ||||||
|                                'collection as it already exists') % self._collection |                                'collection as it already exists') % cls._collection | ||||||
|                         raise InvalidCollectionError(msg) |                         raise InvalidCollectionError(msg) | ||||||
|                 else: |                 else: | ||||||
|                     # Create the collection as a capped collection |                     # Create the collection as a capped collection | ||||||
|                     opts = {'capped': True, 'size': max_size} |                     opts = {'capped': True, 'size': max_size} | ||||||
|                     if max_documents: |                     if max_documents: | ||||||
|                         opts['max'] = max_documents |                         opts['max'] = max_documents | ||||||
|                     self._collection = db.create_collection( |                     cls._collection = db.create_collection( | ||||||
|                         collection_name, **opts |                         collection_name, **opts | ||||||
|                     ) |                     ) | ||||||
|             else: |             else: | ||||||
|                 self._collection = db[collection_name] |                 cls._collection = db[collection_name] | ||||||
|         return self._collection |         return cls._collection | ||||||
|  |  | ||||||
|     def save(self, safe=True, force_insert=False, validate=True, write_options=None, _refs=None): |     def save(self, safe=True, force_insert=False, validate=True, write_options=None, | ||||||
|  |             cascade=None, cascade_kwargs=None, _refs=None): | ||||||
|         """Save the :class:`~mongoengine.Document` to the database. If the |         """Save the :class:`~mongoengine.Document` to the database. If the | ||||||
|         document already exists, it will be updated, otherwise it will be |         document already exists, it will be updated, otherwise it will be | ||||||
|         created. |         created. | ||||||
| @@ -128,16 +159,25 @@ class Document(BaseDocument): | |||||||
|                 :meth:`~pymongo.collection.Collection.save` OR |                 :meth:`~pymongo.collection.Collection.save` OR | ||||||
|                 :meth:`~pymongo.collection.Collection.insert` |                 :meth:`~pymongo.collection.Collection.insert` | ||||||
|                 which will be used as options for the resultant ``getLastError`` command. |                 which will be used as options for the resultant ``getLastError`` command. | ||||||
|                 For example, ``save(..., w=2, fsync=True)`` will wait until at least two servers |                 For example, ``save(..., write_options={w: 2, fsync: True}, ...)`` will | ||||||
|                 have recorded the write and will force an fsync on each server being written to. |                 wait until at least two servers have recorded the write and will force an | ||||||
|  |                 fsync on each server being written to. | ||||||
|  |         :param cascade: Sets the flag for cascading saves.  You can set a default by setting | ||||||
|  |             "cascade" in the document __meta__ | ||||||
|  |         :param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves | ||||||
|  |         :param _refs: A list of processed references used in cascading saves | ||||||
|  |  | ||||||
|         .. versionchanged:: 0.5 |         .. versionchanged:: 0.5 | ||||||
|             In existing documents it only saves changed fields using set / unset |             In existing documents it only saves changed fields using set / unset | ||||||
|             Saves are cascaded and any :class:`~pymongo.dbref.DBRef` objects |             Saves are cascaded and any :class:`~bson.dbref.DBRef` objects | ||||||
|             that have changes are saved as well. |             that have changes are saved as well. | ||||||
|         """ |         .. versionchanged:: 0.6 | ||||||
|         from fields import ReferenceField, GenericReferenceField |             Cascade saves are optional = defaults to True, if you want fine grain | ||||||
|  |             control then you can turn off using document meta['cascade'] = False | ||||||
|  |             Also you can pass different kwargs to the cascade save using cascade_kwargs | ||||||
|  |             which overwrites the existing kwargs with custom values | ||||||
|  |  | ||||||
|  |         """ | ||||||
|         signals.pre_save.send(self.__class__, document=self) |         signals.pre_save.send(self.__class__, document=self) | ||||||
|  |  | ||||||
|         if validate: |         if validate: | ||||||
| @@ -148,11 +188,11 @@ class Document(BaseDocument): | |||||||
|  |  | ||||||
|         doc = self.to_mongo() |         doc = self.to_mongo() | ||||||
|  |  | ||||||
|         created = '_id' in doc |         created = force_insert or '_id' not in doc | ||||||
|         creation_mode = force_insert or not created |  | ||||||
|         try: |         try: | ||||||
|             collection = self.__class__.objects._collection |             collection = self.__class__.objects._collection | ||||||
|             if creation_mode: |             if created: | ||||||
|                 if force_insert: |                 if force_insert: | ||||||
|                     object_id = collection.insert(doc, safe=safe, **write_options) |                     object_id = collection.insert(doc, safe=safe, **write_options) | ||||||
|                 else: |                 else: | ||||||
| @@ -160,21 +200,34 @@ class Document(BaseDocument): | |||||||
|             else: |             else: | ||||||
|                 object_id = doc['_id'] |                 object_id = doc['_id'] | ||||||
|                 updates, removals = self._delta() |                 updates, removals = self._delta() | ||||||
|                 if updates: |  | ||||||
|                     collection.update({'_id': object_id}, {"$set": updates}, upsert=True, safe=safe, **write_options) |  | ||||||
|                 if removals: |  | ||||||
|                     collection.update({'_id': object_id}, {"$unset": removals}, upsert=True, safe=safe, **write_options) |  | ||||||
|  |  | ||||||
|             # Save any references / generic references |                 # Need to add shard key to query, or you get an error | ||||||
|             _refs = _refs or [] |                 select_dict = {'_id': object_id} | ||||||
|             for name, cls in self._fields.items(): |                 shard_key = self.__class__._meta.get('shard_key', tuple()) | ||||||
|                 if isinstance(cls, (ReferenceField, GenericReferenceField)): |                 for k in shard_key: | ||||||
|                     ref = getattr(self, name) |                     actual_key = self._db_field_map.get(k, k) | ||||||
|                     if ref and str(ref) not in _refs: |                     select_dict[actual_key] = doc[actual_key] | ||||||
|                         _refs.append(str(ref)) |  | ||||||
|                         ref.save(safe=safe, force_insert=force_insert, |                 upsert = self._created | ||||||
|                                  validate=validate, write_options=write_options, |                 if updates: | ||||||
|                                  _refs=_refs) |                     collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options) | ||||||
|  |                 if removals: | ||||||
|  |                     collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options) | ||||||
|  |  | ||||||
|  |             cascade = self._meta.get('cascade', True) if cascade is None else cascade | ||||||
|  |             if cascade: | ||||||
|  |                 kwargs = { | ||||||
|  |                     "safe": safe, | ||||||
|  |                     "force_insert": force_insert, | ||||||
|  |                     "validate": validate, | ||||||
|  |                     "write_options": write_options, | ||||||
|  |                     "cascade": cascade | ||||||
|  |                 } | ||||||
|  |                 if cascade_kwargs:  # Allow granular control over cascades | ||||||
|  |                     kwargs.update(cascade_kwargs) | ||||||
|  |                 kwargs['_refs'] = _refs | ||||||
|  |                 self._changed_fields = [] | ||||||
|  |                 self.cascade_save(**kwargs) | ||||||
|  |  | ||||||
|         except pymongo.errors.OperationFailure, err: |         except pymongo.errors.OperationFailure, err: | ||||||
|             message = 'Could not save document (%s)' |             message = 'Could not save document (%s)' | ||||||
| @@ -184,21 +237,27 @@ class Document(BaseDocument): | |||||||
|         id_field = self._meta['id_field'] |         id_field = self._meta['id_field'] | ||||||
|         self[id_field] = self._fields[id_field].to_python(object_id) |         self[id_field] = self._fields[id_field].to_python(object_id) | ||||||
|  |  | ||||||
|         def reset_changed_fields(doc, inspected_docs=None): |         self._changed_fields = [] | ||||||
|             """Loop through and reset changed fields lists""" |         self._created = False | ||||||
|  |         signals.post_save.send(self.__class__, document=self, created=created) | ||||||
|  |         return self | ||||||
|  |  | ||||||
|             inspected_docs = inspected_docs or [] |     def cascade_save(self, *args, **kwargs): | ||||||
|             inspected_docs.append(doc) |         """Recursively saves any references / generic references on an object""" | ||||||
|             if hasattr(doc, '_changed_fields'): |         from fields import ReferenceField, GenericReferenceField | ||||||
|                 doc._changed_fields = [] |         _refs = kwargs.get('_refs', []) or [] | ||||||
|  |         for name, cls in self._fields.items(): | ||||||
|             for field_name in doc._fields: |             if not isinstance(cls, (ReferenceField, GenericReferenceField)): | ||||||
|                 field = getattr(doc, field_name) |                 continue | ||||||
|                 if field not in inspected_docs and hasattr(field, '_changed_fields'): |             ref = getattr(self, name) | ||||||
|                     reset_changed_fields(field, inspected_docs) |             if not ref: | ||||||
|  |                 continue | ||||||
|         reset_changed_fields(self) |             ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) | ||||||
|         signals.post_save.send(self.__class__, document=self, created=creation_mode) |             if ref and ref_id not in _refs: | ||||||
|  |                 _refs.append(ref_id) | ||||||
|  |                 kwargs["_refs"] = _refs | ||||||
|  |                 ref.save(**kwargs) | ||||||
|  |                 ref._changed_fields = [] | ||||||
|  |  | ||||||
|     def update(self, **kwargs): |     def update(self, **kwargs): | ||||||
|         """Performs an update on the :class:`~mongoengine.Document` |         """Performs an update on the :class:`~mongoengine.Document` | ||||||
| @@ -210,7 +269,12 @@ class Document(BaseDocument): | |||||||
|         if not self.pk: |         if not self.pk: | ||||||
|             raise OperationError('attempt to update a document not yet saved') |             raise OperationError('attempt to update a document not yet saved') | ||||||
|  |  | ||||||
|         return self.__class__.objects(pk=self.pk).update_one(**kwargs) |         # Need to add shard key to query, or you get an error | ||||||
|  |         select_dict = {'pk': self.pk} | ||||||
|  |         shard_key = self.__class__._meta.get('shard_key', tuple()) | ||||||
|  |         for k in shard_key: | ||||||
|  |             select_dict[k] = getattr(self, k) | ||||||
|  |         return self.__class__.objects(**select_dict).update_one(**kwargs) | ||||||
|  |  | ||||||
|     def delete(self, safe=False): |     def delete(self, safe=False): | ||||||
|         """Delete the :class:`~mongoengine.Document` from the database. This |         """Delete the :class:`~mongoengine.Document` from the database. This | ||||||
| @@ -220,10 +284,8 @@ class Document(BaseDocument): | |||||||
|         """ |         """ | ||||||
|         signals.pre_delete.send(self.__class__, document=self) |         signals.pre_delete.send(self.__class__, document=self) | ||||||
|  |  | ||||||
|         id_field = self._meta['id_field'] |  | ||||||
|         object_id = self._fields[id_field].to_mongo(self[id_field]) |  | ||||||
|         try: |         try: | ||||||
|             self.__class__.objects(**{id_field: object_id}).delete(safe=safe) |             self.__class__.objects(pk=self.pk).delete(safe=safe) | ||||||
|         except pymongo.errors.OperationFailure, err: |         except pymongo.errors.OperationFailure, err: | ||||||
|             message = u'Could not delete document (%s)' % err.message |             message = u'Could not delete document (%s)' % err.message | ||||||
|             raise OperationError(message) |             raise OperationError(message) | ||||||
| @@ -231,47 +293,54 @@ class Document(BaseDocument): | |||||||
|         signals.post_delete.send(self.__class__, document=self) |         signals.post_delete.send(self.__class__, document=self) | ||||||
|  |  | ||||||
|     def select_related(self, max_depth=1): |     def select_related(self, max_depth=1): | ||||||
|         """Handles dereferencing of :class:`~pymongo.dbref.DBRef` objects to |         """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to | ||||||
|         a maximum depth in order to cut down the number queries to mongodb. |         a maximum depth in order to cut down the number queries to mongodb. | ||||||
|  |  | ||||||
|         .. versionadded:: 0.5 |         .. versionadded:: 0.5 | ||||||
|         """ |         """ | ||||||
|         from dereference import dereference |         from dereference import DeReference | ||||||
|         self._data = dereference(self._data, max_depth) |         self._data = DeReference()(self._data, max_depth) | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     def reload(self): |     def reload(self, max_depth=1): | ||||||
|         """Reloads all attributes from the database. |         """Reloads all attributes from the database. | ||||||
|  |  | ||||||
|         .. versionadded:: 0.1.2 |         .. versionadded:: 0.1.2 | ||||||
|  |         .. versionchanged:: 0.6  Now chainable | ||||||
|         """ |         """ | ||||||
|         id_field = self._meta['id_field'] |         id_field = self._meta['id_field'] | ||||||
|         obj = self.__class__.objects(**{id_field: self[id_field]}).first() |         obj = self.__class__.objects( | ||||||
|  |                 **{id_field: self[id_field]} | ||||||
|  |               ).first().select_related(max_depth=max_depth) | ||||||
|         for field in self._fields: |         for field in self._fields: | ||||||
|             setattr(self, field, self._reload(field, obj[field])) |             setattr(self, field, self._reload(field, obj[field])) | ||||||
|         self._changed_fields = [] |         if self._dynamic: | ||||||
|  |             for name in self._dynamic_fields.keys(): | ||||||
|  |                 setattr(self, name, self._reload(name, obj._data[name])) | ||||||
|  |         self._changed_fields = obj._changed_fields | ||||||
|  |         return obj | ||||||
|  |  | ||||||
|     def _reload(self, key, value): |     def _reload(self, key, value): | ||||||
|         """Used by :meth:`~mongoengine.Document.reload` to ensure the |         """Used by :meth:`~mongoengine.Document.reload` to ensure the | ||||||
|         correct instance is linked to self. |         correct instance is linked to self. | ||||||
|         """ |         """ | ||||||
|         if isinstance(value, BaseDict): |         if isinstance(value, BaseDict): | ||||||
|             value = [(k, self._reload(k,v)) for k,v in value.items()] |             value = [(k, self._reload(k, v)) for k, v in value.items()] | ||||||
|             value = BaseDict(value, instance=self, name=key) |             value = BaseDict(value, self, key) | ||||||
|         elif isinstance(value, BaseList): |         elif isinstance(value, BaseList): | ||||||
|             value = [self._reload(key, v) for v in value] |             value = [self._reload(key, v) for v in value] | ||||||
|             value = BaseList(value, instance=self, name=key) |             value = BaseList(value, self, key) | ||||||
|         elif isinstance(value, EmbeddedDocument): |         elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)): | ||||||
|             value._changed_fields = [] |             value._changed_fields = [] | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|     def to_dbref(self): |     def to_dbref(self): | ||||||
|         """Returns an instance of :class:`~pymongo.dbref.DBRef` useful in |         """Returns an instance of :class:`~bson.dbref.DBRef` useful in | ||||||
|         `__raw__` queries.""" |         `__raw__` queries.""" | ||||||
|         if not self.pk: |         if not self.pk: | ||||||
|             msg = "Only saved documents can have a valid dbref" |             msg = "Only saved documents can have a valid dbref" | ||||||
|             raise OperationError(msg) |             raise OperationError(msg) | ||||||
|         return pymongo.dbref.DBRef(self.__class__._get_collection_name(), self.pk) |         return DBRef(self.__class__._get_collection_name(), self.pk) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def register_delete_rule(cls, document_cls, field_name, rule): |     def register_delete_rule(cls, document_cls, field_name, rule): | ||||||
| @@ -285,8 +354,52 @@ class Document(BaseDocument): | |||||||
|         """Drops the entire collection associated with this |         """Drops the entire collection associated with this | ||||||
|         :class:`~mongoengine.Document` type from the database. |         :class:`~mongoengine.Document` type from the database. | ||||||
|         """ |         """ | ||||||
|         db = _get_db() |         from mongoengine.queryset import QuerySet | ||||||
|  |         db = cls._get_db() | ||||||
|         db.drop_collection(cls._get_collection_name()) |         db.drop_collection(cls._get_collection_name()) | ||||||
|  |         QuerySet._reset_already_indexed(cls) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DynamicDocument(Document): | ||||||
|  |     """A Dynamic Document class allowing flexible, expandable and uncontrolled | ||||||
|  |     schemas.  As a :class:`~mongoengine.Document` subclass, acts in the same | ||||||
|  |     way as an ordinary document but has expando style properties.  Any data | ||||||
|  |     passed or set against the :class:`~mongoengine.DynamicDocument` that is | ||||||
|  |     not a field is automatically converted into a | ||||||
|  |     :class:`~mongoengine.DynamicField` and data can be attributed to that | ||||||
|  |     field. | ||||||
|  |  | ||||||
|  |     ..note:: | ||||||
|  |  | ||||||
|  |         There is one caveat on Dynamic Documents: fields cannot start with `_` | ||||||
|  |     """ | ||||||
|  |     __metaclass__ = TopLevelDocumentMetaclass | ||||||
|  |     _dynamic = True | ||||||
|  |  | ||||||
|  |     def __delattr__(self, *args, **kwargs): | ||||||
|  |         """Deletes the attribute by setting to None and allowing _delta to unset | ||||||
|  |         it""" | ||||||
|  |         field_name = args[0] | ||||||
|  |         if field_name in self._dynamic_fields: | ||||||
|  |             setattr(self, field_name, None) | ||||||
|  |         else: | ||||||
|  |             super(DynamicDocument, self).__delattr__(*args, **kwargs) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DynamicEmbeddedDocument(EmbeddedDocument): | ||||||
|  |     """A Dynamic Embedded Document class allowing flexible, expandable and | ||||||
|  |     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more | ||||||
|  |     information about dynamic documents. | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     __metaclass__ = DocumentMetaclass | ||||||
|  |     _dynamic = True | ||||||
|  |  | ||||||
|  |     def __delattr__(self, *args, **kwargs): | ||||||
|  |         """Deletes the attribute by setting to None and allowing _delta to unset | ||||||
|  |         it""" | ||||||
|  |         field_name = args[0] | ||||||
|  |         setattr(self, field_name, None) | ||||||
|  |  | ||||||
|  |  | ||||||
| class MapReduceDocument(object): | class MapReduceDocument(object): | ||||||
| @@ -294,7 +407,7 @@ class MapReduceDocument(object): | |||||||
|  |  | ||||||
|     :param collection: An instance of :class:`~pymongo.Collection` |     :param collection: An instance of :class:`~pymongo.Collection` | ||||||
|     :param key: Document/result key, often an instance of |     :param key: Document/result key, often an instance of | ||||||
|                 :class:`~pymongo.objectid.ObjectId`. If supplied as |                 :class:`~bson.objectid.ObjectId`. If supplied as | ||||||
|                 an ``ObjectId`` found in the given ``collection``, |                 an ``ObjectId`` found in the given ``collection``, | ||||||
|                 the object can be accessed via the ``object`` property. |                 the object can be accessed via the ``object`` property. | ||||||
|     :param value: The result(s) for this key. |     :param value: The result(s) for this key. | ||||||
|   | |||||||
| @@ -1,27 +1,39 @@ | |||||||
| from base import (BaseField, ComplexBaseField, ObjectIdField, | import datetime | ||||||
|                   ValidationError, get_document) | import time | ||||||
| from queryset import DO_NOTHING |  | ||||||
| from document import Document, EmbeddedDocument |  | ||||||
| from connection import _get_db |  | ||||||
| from operator import itemgetter |  | ||||||
|  |  | ||||||
| import re |  | ||||||
| import pymongo |  | ||||||
| import pymongo.dbref |  | ||||||
| import pymongo.son |  | ||||||
| import pymongo.binary |  | ||||||
| import datetime, time |  | ||||||
| import decimal | import decimal | ||||||
| import gridfs | import gridfs | ||||||
|  | import re | ||||||
|  | import uuid | ||||||
|  |  | ||||||
|  | from bson import Binary, DBRef, SON, ObjectId | ||||||
|  |  | ||||||
|  | from base import (BaseField, ComplexBaseField, ObjectIdField, | ||||||
|  |                   ValidationError, get_document, BaseDocument) | ||||||
|  | from queryset import DO_NOTHING, QuerySet | ||||||
|  | from document import Document, EmbeddedDocument | ||||||
|  | from connection import get_db, DEFAULT_CONNECTION_NAME | ||||||
|  | from operator import itemgetter | ||||||
|  |  | ||||||
|  |  | ||||||
|  | try: | ||||||
|  |     from PIL import Image, ImageOps | ||||||
|  | except ImportError: | ||||||
|  |     Image = None | ||||||
|  |     ImageOps = None | ||||||
|  |  | ||||||
|  | try: | ||||||
|  |     from cStringIO import StringIO | ||||||
|  | except ImportError: | ||||||
|  |     from StringIO import StringIO | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', | __all__ = ['StringField', 'IntField', 'FloatField', 'BooleanField', | ||||||
|            'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField', |            'DateTimeField', 'EmbeddedDocumentField', 'ListField', 'DictField', | ||||||
|            'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', |            'ObjectIdField', 'ReferenceField', 'ValidationError', 'MapField', | ||||||
|            'DecimalField', 'ComplexDateTimeField', 'URLField', |            'DecimalField', 'ComplexDateTimeField', 'URLField', 'DynamicField', | ||||||
|            'GenericReferenceField', 'FileField', 'BinaryField', |            'GenericReferenceField', 'FileField', 'BinaryField', | ||||||
|            'SortedListField', 'EmailField', 'GeoPointField', |            'SortedListField', 'EmailField', 'GeoPointField', 'ImageField', | ||||||
|            'SequenceField', 'GenericEmbeddedDocumentField'] |            'SequenceField', 'UUIDField', 'GenericEmbeddedDocumentField'] | ||||||
|  |  | ||||||
| RECURSIVE_REFERENCE_CONSTANT = 'self' | RECURSIVE_REFERENCE_CONSTANT = 'self' | ||||||
|  |  | ||||||
| @@ -40,17 +52,17 @@ class StringField(BaseField): | |||||||
|         return unicode(value) |         return unicode(value) | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         assert isinstance(value, (str, unicode)) |         if not isinstance(value, (str, unicode)): | ||||||
|  |             self.error('StringField only accepts string values') | ||||||
|  |  | ||||||
|         if self.max_length is not None and len(value) > self.max_length: |         if self.max_length is not None and len(value) > self.max_length: | ||||||
|             raise ValidationError('String value is too long') |             self.error('String value is too long') | ||||||
|  |  | ||||||
|         if self.min_length is not None and len(value) < self.min_length: |         if self.min_length is not None and len(value) < self.min_length: | ||||||
|             raise ValidationError('String value is too short') |             self.error('String value is too short') | ||||||
|  |  | ||||||
|         if self.regex is not None and self.regex.match(value) is None: |         if self.regex is not None and self.regex.match(value) is None: | ||||||
|             message = 'String value did not match validation regex' |             self.error('String value did not match validation regex') | ||||||
|             raise ValidationError(message) |  | ||||||
|  |  | ||||||
|     def lookup_member(self, member_name): |     def lookup_member(self, member_name): | ||||||
|         return None |         return None | ||||||
| @@ -100,16 +112,15 @@ class URLField(StringField): | |||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         if not URLField.URL_REGEX.match(value): |         if not URLField.URL_REGEX.match(value): | ||||||
|             raise ValidationError('Invalid URL: %s' % value) |             self.error('Invalid URL: %s' % value) | ||||||
|  |  | ||||||
|         if self.verify_exists: |         if self.verify_exists: | ||||||
|             import urllib2 |             import urllib2 | ||||||
|             try: |             try: | ||||||
|                 request = urllib2.Request(value) |                 request = urllib2.Request(value) | ||||||
|                 response = urllib2.urlopen(request) |                 urllib2.urlopen(request) | ||||||
|             except Exception, e: |             except Exception, e: | ||||||
|                 message = 'This URL appears to be a broken link: %s' % e |                 self.error('This URL appears to be a broken link: %s' % e) | ||||||
|                 raise ValidationError(message) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class EmailField(StringField): | class EmailField(StringField): | ||||||
| @@ -126,7 +137,7 @@ class EmailField(StringField): | |||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         if not EmailField.EMAIL_REGEX.match(value): |         if not EmailField.EMAIL_REGEX.match(value): | ||||||
|             raise ValidationError('Invalid Mail-address: %s' % value) |             self.error('Invalid Mail-address: %s' % value) | ||||||
|  |  | ||||||
|  |  | ||||||
| class IntField(BaseField): | class IntField(BaseField): | ||||||
| @@ -144,13 +155,13 @@ class IntField(BaseField): | |||||||
|         try: |         try: | ||||||
|             value = int(value) |             value = int(value) | ||||||
|         except: |         except: | ||||||
|             raise ValidationError('%s could not be converted to int' % value) |             self.error('%s could not be converted to int' % value) | ||||||
|  |  | ||||||
|         if self.min_value is not None and value < self.min_value: |         if self.min_value is not None and value < self.min_value: | ||||||
|             raise ValidationError('Integer value is too small') |             self.error('Integer value is too small') | ||||||
|  |  | ||||||
|         if self.max_value is not None and value > self.max_value: |         if self.max_value is not None and value > self.max_value: | ||||||
|             raise ValidationError('Integer value is too large') |             self.error('Integer value is too large') | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
|         return int(value) |         return int(value) | ||||||
| @@ -170,13 +181,14 @@ class FloatField(BaseField): | |||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         if isinstance(value, int): |         if isinstance(value, int): | ||||||
|             value = float(value) |             value = float(value) | ||||||
|         assert isinstance(value, float) |         if not isinstance(value, float): | ||||||
|  |             self.error('FloatField only accepts float values') | ||||||
|  |  | ||||||
|         if self.min_value is not None and value < self.min_value: |         if self.min_value is not None and value < self.min_value: | ||||||
|             raise ValidationError('Float value is too small') |             self.error('Float value is too small') | ||||||
|  |  | ||||||
|         if self.max_value is not None and value > self.max_value: |         if self.max_value is not None and value > self.max_value: | ||||||
|             raise ValidationError('Float value is too large') |             self.error('Float value is too large') | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
|         return float(value) |         return float(value) | ||||||
| @@ -207,13 +219,13 @@ class DecimalField(BaseField): | |||||||
|             try: |             try: | ||||||
|                 value = decimal.Decimal(value) |                 value = decimal.Decimal(value) | ||||||
|             except Exception, exc: |             except Exception, exc: | ||||||
|                 raise ValidationError('Could not convert to decimal: %s' % exc) |                 self.error('Could not convert value to decimal: %s' % exc) | ||||||
|  |  | ||||||
|         if self.min_value is not None and value < self.min_value: |         if self.min_value is not None and value < self.min_value: | ||||||
|             raise ValidationError('Decimal value is too small') |             self.error('Decimal value is too small') | ||||||
|  |  | ||||||
|         if self.max_value is not None and value > self.max_value: |         if self.max_value is not None and value > self.max_value: | ||||||
|             raise ValidationError('Decimal value is too large') |             self.error('Decimal value is too large') | ||||||
|  |  | ||||||
|  |  | ||||||
| class BooleanField(BaseField): | class BooleanField(BaseField): | ||||||
| @@ -226,7 +238,8 @@ class BooleanField(BaseField): | |||||||
|         return bool(value) |         return bool(value) | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         assert isinstance(value, bool) |         if not isinstance(value, bool): | ||||||
|  |             self.error('BooleanField only accepts boolean values') | ||||||
|  |  | ||||||
|  |  | ||||||
| class DateTimeField(BaseField): | class DateTimeField(BaseField): | ||||||
| @@ -239,7 +252,8 @@ class DateTimeField(BaseField): | |||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         assert isinstance(value, (datetime.datetime, datetime.date)) |         if not isinstance(value, (datetime.datetime, datetime.date)): | ||||||
|  |             self.error(u'cannot parse date "%s"' % value) | ||||||
|  |  | ||||||
|     def to_mongo(self, value): |     def to_mongo(self, value): | ||||||
|         return self.prepare_query_value(None, value) |         return self.prepare_query_value(None, value) | ||||||
| @@ -355,13 +369,13 @@ class ComplexDateTimeField(StringField): | |||||||
|         return self._convert_from_string(data) |         return self._convert_from_string(data) | ||||||
|  |  | ||||||
|     def __set__(self, instance, value): |     def __set__(self, instance, value): | ||||||
|         value = self._convert_from_datetime(value) |         value = self._convert_from_datetime(value) if value else value | ||||||
|         return super(ComplexDateTimeField, self).__set__(instance, value) |         return super(ComplexDateTimeField, self).__set__(instance, value) | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         if not isinstance(value, datetime.datetime): |         if not isinstance(value, datetime.datetime): | ||||||
|             raise ValidationError('Only datetime objects may used in a \ |             self.error('Only datetime objects may used in a ' | ||||||
|                                    ComplexDateTimeField') |                        'ComplexDateTimeField') | ||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         return self._convert_from_string(value) |         return self._convert_from_string(value) | ||||||
| @@ -381,8 +395,8 @@ class EmbeddedDocumentField(BaseField): | |||||||
|     def __init__(self, document_type, **kwargs): |     def __init__(self, document_type, **kwargs): | ||||||
|         if not isinstance(document_type, basestring): |         if not isinstance(document_type, basestring): | ||||||
|             if not issubclass(document_type, EmbeddedDocument): |             if not issubclass(document_type, EmbeddedDocument): | ||||||
|                 raise ValidationError('Invalid embedded document class ' |                 self.error('Invalid embedded document class provided to an ' | ||||||
|                                       'provided to an EmbeddedDocumentField') |                            'EmbeddedDocumentField') | ||||||
|         self.document_type_obj = document_type |         self.document_type_obj = document_type | ||||||
|         super(EmbeddedDocumentField, self).__init__(**kwargs) |         super(EmbeddedDocumentField, self).__init__(**kwargs) | ||||||
|  |  | ||||||
| @@ -411,8 +425,8 @@ class EmbeddedDocumentField(BaseField): | |||||||
|         """ |         """ | ||||||
|         # Using isinstance also works for subclasses of self.document |         # Using isinstance also works for subclasses of self.document | ||||||
|         if not isinstance(value, self.document_type): |         if not isinstance(value, self.document_type): | ||||||
|             raise ValidationError('Invalid embedded document instance ' |             self.error('Invalid embedded document instance provided to an ' | ||||||
|                                   'provided to an EmbeddedDocumentField') |                        'EmbeddedDocumentField') | ||||||
|         self.document_type.validate(value) |         self.document_type.validate(value) | ||||||
|  |  | ||||||
|     def lookup_member(self, member_name): |     def lookup_member(self, member_name): | ||||||
| @@ -427,6 +441,9 @@ class GenericEmbeddedDocumentField(BaseField): | |||||||
|     :class:`~mongoengine.EmbeddedDocument` to be stored. |     :class:`~mongoengine.EmbeddedDocument` to be stored. | ||||||
|  |  | ||||||
|     Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`. |     Only valid values are subclasses of :class:`~mongoengine.EmbeddedDocument`. | ||||||
|  |  | ||||||
|  |     ..note :: You can use the choices param to limit the acceptable | ||||||
|  |     EmbeddedDocument types | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
| @@ -441,8 +458,8 @@ class GenericEmbeddedDocumentField(BaseField): | |||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         if not isinstance(value, EmbeddedDocument): |         if not isinstance(value, EmbeddedDocument): | ||||||
|             raise ValidationError('Invalid embedded document instance ' |             self.error('Invalid embedded document instance provided to an ' | ||||||
|                                   'provided to an GenericEmbeddedDocumentField') |                        'GenericEmbeddedDocumentField') | ||||||
|  |  | ||||||
|         value.validate() |         value.validate() | ||||||
|  |  | ||||||
| @@ -456,9 +473,53 @@ class GenericEmbeddedDocumentField(BaseField): | |||||||
|         return data |         return data | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DynamicField(BaseField): | ||||||
|  |     """Used by :class:`~mongoengine.DynamicDocument` to handle dynamic data""" | ||||||
|  |  | ||||||
|  |     def to_mongo(self, value): | ||||||
|  |         """Convert a Python type to a MongoDBcompatible type. | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         if isinstance(value, basestring): | ||||||
|  |             return value | ||||||
|  |  | ||||||
|  |         if hasattr(value, 'to_mongo'): | ||||||
|  |             return value.to_mongo() | ||||||
|  |  | ||||||
|  |         if not isinstance(value, (dict, list, tuple)): | ||||||
|  |             return value | ||||||
|  |  | ||||||
|  |         is_list = False | ||||||
|  |         if not hasattr(value, 'items'): | ||||||
|  |             is_list = True | ||||||
|  |             value = dict([(k, v) for k, v in enumerate(value)]) | ||||||
|  |  | ||||||
|  |         data = {} | ||||||
|  |         for k, v in value.items(): | ||||||
|  |             data[k] = self.to_mongo(v) | ||||||
|  |  | ||||||
|  |         if is_list:  # Convert back to a list | ||||||
|  |             value = [v for k, v in sorted(data.items(), key=itemgetter(0))] | ||||||
|  |         else: | ||||||
|  |             value = data | ||||||
|  |         return value | ||||||
|  |  | ||||||
|  |     def lookup_member(self, member_name): | ||||||
|  |         return member_name | ||||||
|  |  | ||||||
|  |     def prepare_query_value(self, op, value): | ||||||
|  |         if isinstance(value, basestring): | ||||||
|  |             from mongoengine.fields import StringField | ||||||
|  |             return StringField().prepare_query_value(op, value) | ||||||
|  |         return self.to_mongo(value) | ||||||
|  |  | ||||||
|  |  | ||||||
| class ListField(ComplexBaseField): | class ListField(ComplexBaseField): | ||||||
|     """A list field that wraps a standard field, allowing multiple instances |     """A list field that wraps a standard field, allowing multiple instances | ||||||
|     of the field to be used as a list in the database. |     of the field to be used as a list in the database. | ||||||
|  |  | ||||||
|  |     .. note:: | ||||||
|  |         Required means it cannot be empty - as the default for ListFields is [] | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     # ListFields cannot be indexed with _types - MongoDB doesn't support this |     # ListFields cannot be indexed with _types - MongoDB doesn't support this | ||||||
| @@ -472,14 +533,15 @@ class ListField(ComplexBaseField): | |||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         """Make sure that a list of valid fields is being used. |         """Make sure that a list of valid fields is being used. | ||||||
|         """ |         """ | ||||||
|         if not isinstance(value, (list, tuple)): |         if (not isinstance(value, (list, tuple, QuerySet)) or | ||||||
|             raise ValidationError('Only lists and tuples may be used in a ' |             isinstance(value, basestring)): | ||||||
|                                   'list field') |             self.error('Only lists and tuples may be used in a list field') | ||||||
|         super(ListField, self).validate(value) |         super(ListField, self).validate(value) | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
|         if self.field: |         if self.field: | ||||||
|             if op in ('set', 'unset') and (not isinstance(value, basestring) |             if op in ('set', 'unset') and (not isinstance(value, basestring) | ||||||
|  |                 and not isinstance(value, BaseDocument) | ||||||
|                 and hasattr(value, '__iter__')): |                 and hasattr(value, '__iter__')): | ||||||
|                 return [self.field.prepare_query_value(op, v) for v in value] |                 return [self.field.prepare_query_value(op, v) for v in value] | ||||||
|             return self.field.prepare_query_value(op, value) |             return self.field.prepare_query_value(op, value) | ||||||
| @@ -491,27 +553,40 @@ class SortedListField(ListField): | |||||||
|     the database in order to ensure that a sorted list is always |     the database in order to ensure that a sorted list is always | ||||||
|     retrieved. |     retrieved. | ||||||
|  |  | ||||||
|  |     .. warning:: | ||||||
|  |         There is a potential race condition when handling lists.  If you set / | ||||||
|  |         save the whole list then other processes trying to save the whole list | ||||||
|  |         as well could overwrite changes.  The safest way to append to a list is | ||||||
|  |         to perform a push operation. | ||||||
|  |  | ||||||
|     .. versionadded:: 0.4 |     .. versionadded:: 0.4 | ||||||
|  |     .. versionchanged:: 0.6 - added reverse keyword | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     _ordering = None |     _ordering = None | ||||||
|  |     _order_reverse = False | ||||||
|  |  | ||||||
|     def __init__(self, field, **kwargs): |     def __init__(self, field, **kwargs): | ||||||
|         if 'ordering' in kwargs.keys(): |         if 'ordering' in kwargs.keys(): | ||||||
|             self._ordering = kwargs.pop('ordering') |             self._ordering = kwargs.pop('ordering') | ||||||
|  |         if 'reverse' in kwargs.keys(): | ||||||
|  |             self._order_reverse = kwargs.pop('reverse') | ||||||
|         super(SortedListField, self).__init__(field, **kwargs) |         super(SortedListField, self).__init__(field, **kwargs) | ||||||
|  |  | ||||||
|     def to_mongo(self, value): |     def to_mongo(self, value): | ||||||
|         value = super(SortedListField, self).to_mongo(value) |         value = super(SortedListField, self).to_mongo(value) | ||||||
|         if self._ordering is not None: |         if self._ordering is not None: | ||||||
|             return sorted(value, key=itemgetter(self._ordering)) |             return sorted(value, key=itemgetter(self._ordering), reverse=self._order_reverse) | ||||||
|         return sorted(value) |         return sorted(value, reverse=self._order_reverse) | ||||||
|  |  | ||||||
|  |  | ||||||
| class DictField(ComplexBaseField): | class DictField(ComplexBaseField): | ||||||
|     """A dictionary field that wraps a standard Python dictionary. This is |     """A dictionary field that wraps a standard Python dictionary. This is | ||||||
|     similar to an embedded document, but the structure is not defined. |     similar to an embedded document, but the structure is not defined. | ||||||
|  |  | ||||||
|  |     .. note:: | ||||||
|  |         Required means it cannot be empty - as the default for ListFields is [] | ||||||
|  |  | ||||||
|     .. versionadded:: 0.3 |     .. versionadded:: 0.3 | ||||||
|     .. versionchanged:: 0.5 - Can now handle complex / varying types of data |     .. versionchanged:: 0.5 - Can now handle complex / varying types of data | ||||||
|     """ |     """ | ||||||
| @@ -519,7 +594,8 @@ class DictField(ComplexBaseField): | |||||||
|     def __init__(self, basecls=None, field=None, *args, **kwargs): |     def __init__(self, basecls=None, field=None, *args, **kwargs): | ||||||
|         self.field = field |         self.field = field | ||||||
|         self.basecls = basecls or BaseField |         self.basecls = basecls or BaseField | ||||||
|         assert issubclass(self.basecls, BaseField) |         if not issubclass(self.basecls, BaseField): | ||||||
|  |             self.error('DictField only accepts dict values') | ||||||
|         kwargs.setdefault('default', lambda: {}) |         kwargs.setdefault('default', lambda: {}) | ||||||
|         super(DictField, self).__init__(*args, **kwargs) |         super(DictField, self).__init__(*args, **kwargs) | ||||||
|  |  | ||||||
| @@ -527,12 +603,13 @@ class DictField(ComplexBaseField): | |||||||
|         """Make sure that a list of valid fields is being used. |         """Make sure that a list of valid fields is being used. | ||||||
|         """ |         """ | ||||||
|         if not isinstance(value, dict): |         if not isinstance(value, dict): | ||||||
|             raise ValidationError('Only dictionaries may be used in a ' |             self.error('Only dictionaries may be used in a DictField') | ||||||
|                                   'DictField') |  | ||||||
|  |  | ||||||
|         if any(('.' in k or '$' in k) for k in value): |         if any(k for k in value.keys() if not isinstance(k, basestring)): | ||||||
|             raise ValidationError('Invalid dictionary key name - keys may not ' |             self.error('Invalid dictionary key - documents must have only string keys') | ||||||
|                                   'contain "." or "$" characters') |         if any(('.' in k or '$' in k) for k in value.keys()): | ||||||
|  |             self.error('Invalid dictionary key name - keys may not contain "."' | ||||||
|  |                        ' or "$" characters') | ||||||
|         super(DictField, self).validate(value) |         super(DictField, self).validate(value) | ||||||
|  |  | ||||||
|     def lookup_member(self, member_name): |     def lookup_member(self, member_name): | ||||||
| @@ -559,18 +636,19 @@ class MapField(DictField): | |||||||
|  |  | ||||||
|     def __init__(self, field=None, *args, **kwargs): |     def __init__(self, field=None, *args, **kwargs): | ||||||
|         if not isinstance(field, BaseField): |         if not isinstance(field, BaseField): | ||||||
|             raise ValidationError('Argument to MapField constructor must be ' |             self.error('Argument to MapField constructor must be a valid ' | ||||||
|                                   'a valid field') |                        'field') | ||||||
|         super(MapField, self).__init__(field=field, *args, **kwargs) |         super(MapField, self).__init__(field=field, *args, **kwargs) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ReferenceField(BaseField): | class ReferenceField(BaseField): | ||||||
|     """A reference to a document that will be automatically dereferenced on |     """A reference to a document that will be automatically dereferenced on | ||||||
|     access (lazily). |     access (lazily). | ||||||
|  |  | ||||||
|     Use the `reverse_delete_rule` to handle what should happen if the document |     Use the `reverse_delete_rule` to handle what should happen if the document | ||||||
|     the field is referencing is deleted. |     the field is referencing is deleted.  EmbeddedDocuments, DictFields and | ||||||
|  |     MapFields do not support reverse_delete_rules and an `InvalidDocumentError` | ||||||
|  |     will be raised if trying to set on one of these Document / Field types. | ||||||
|  |  | ||||||
|     The options are: |     The options are: | ||||||
|  |  | ||||||
| @@ -578,6 +656,18 @@ class ReferenceField(BaseField): | |||||||
|       * NULLIFY     - Updates the reference to null. |       * NULLIFY     - Updates the reference to null. | ||||||
|       * CASCADE     - Deletes the documents associated with the reference. |       * CASCADE     - Deletes the documents associated with the reference. | ||||||
|       * DENY        - Prevent the deletion of the reference object. |       * DENY        - Prevent the deletion of the reference object. | ||||||
|  |       * PULL        - Pull the reference from a :class:`~mongoengine.ListField` of references | ||||||
|  |  | ||||||
|  |     Alternative syntax for registering delete rules (useful when implementing | ||||||
|  |     bi-directional delete rules) | ||||||
|  |  | ||||||
|  |     .. code-block:: python | ||||||
|  |  | ||||||
|  |         class Bar(Document): | ||||||
|  |             content = StringField() | ||||||
|  |             foo = ReferenceField('Foo') | ||||||
|  |  | ||||||
|  |         Bar.register_delete_rule(Foo, 'bar', NULLIFY) | ||||||
|  |  | ||||||
|     .. versionchanged:: 0.5 added `reverse_delete_rule` |     .. versionchanged:: 0.5 added `reverse_delete_rule` | ||||||
|     """ |     """ | ||||||
| @@ -590,8 +680,8 @@ class ReferenceField(BaseField): | |||||||
|         """ |         """ | ||||||
|         if not isinstance(document_type, basestring): |         if not isinstance(document_type, basestring): | ||||||
|             if not issubclass(document_type, (Document, basestring)): |             if not issubclass(document_type, (Document, basestring)): | ||||||
|                 raise ValidationError('Argument to ReferenceField constructor ' |                 self.error('Argument to ReferenceField constructor must be a ' | ||||||
|                                       'must be a document class or a string') |                            'document class or a string') | ||||||
|         self.document_type_obj = document_type |         self.document_type_obj = document_type | ||||||
|         self.reverse_delete_rule = reverse_delete_rule |         self.reverse_delete_rule = reverse_delete_rule | ||||||
|         super(ReferenceField, self).__init__(**kwargs) |         super(ReferenceField, self).__init__(**kwargs) | ||||||
| @@ -615,14 +705,17 @@ class ReferenceField(BaseField): | |||||||
|         # Get value from document instance if available |         # Get value from document instance if available | ||||||
|         value = instance._data.get(self.name) |         value = instance._data.get(self.name) | ||||||
|         # Dereference DBRefs |         # Dereference DBRefs | ||||||
|         if isinstance(value, (pymongo.dbref.DBRef)): |         if isinstance(value, (DBRef)): | ||||||
|             value = _get_db().dereference(value) |             value = self.document_type._get_db().dereference(value) | ||||||
|             if value is not None: |             if value is not None: | ||||||
|                 instance._data[self.name] = self.document_type._from_son(value) |                 instance._data[self.name] = self.document_type._from_son(value) | ||||||
|  |  | ||||||
|         return super(ReferenceField, self).__get__(instance, owner) |         return super(ReferenceField, self).__get__(instance, owner) | ||||||
|  |  | ||||||
|     def to_mongo(self, document): |     def to_mongo(self, document): | ||||||
|  |         if isinstance(document, DBRef): | ||||||
|  |             return document | ||||||
|  |  | ||||||
|         id_field_name = self.document_type._meta['id_field'] |         id_field_name = self.document_type._meta['id_field'] | ||||||
|         id_field = self.document_type._fields[id_field_name] |         id_field = self.document_type._fields[id_field_name] | ||||||
|  |  | ||||||
| @@ -630,25 +723,28 @@ class ReferenceField(BaseField): | |||||||
|             # We need the id from the saved object to create the DBRef |             # We need the id from the saved object to create the DBRef | ||||||
|             id_ = document.id |             id_ = document.id | ||||||
|             if id_ is None: |             if id_ is None: | ||||||
|                 raise ValidationError('You can only reference documents once ' |                 self.error('You can only reference documents once they have' | ||||||
|                                       'they have been saved to the database') |                            ' been saved to the database') | ||||||
|         else: |         else: | ||||||
|             id_ = document |             id_ = document | ||||||
|  |  | ||||||
|         id_ = id_field.to_mongo(id_) |         id_ = id_field.to_mongo(id_) | ||||||
|         collection = self.document_type._get_collection_name() |         collection = self.document_type._get_collection_name() | ||||||
|         return pymongo.dbref.DBRef(collection, id_) |         return DBRef(collection, id_) | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
|  |         if value is None: | ||||||
|  |             return None | ||||||
|  |  | ||||||
|         return self.to_mongo(value) |         return self.to_mongo(value) | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         assert isinstance(value, (self.document_type, pymongo.dbref.DBRef)) |         if not isinstance(value, (self.document_type, DBRef)): | ||||||
|  |             self.error('A ReferenceField only accepts DBRef') | ||||||
|  |  | ||||||
|         if isinstance(value, Document) and value.id is None: |         if isinstance(value, Document) and value.id is None: | ||||||
|             raise ValidationError('You can only reference documents once ' |             self.error('You can only reference documents once they have been ' | ||||||
|                                   'they have been saved to the database') |                        'saved to the database') | ||||||
|  |  | ||||||
|  |  | ||||||
|     def lookup_member(self, member_name): |     def lookup_member(self, member_name): | ||||||
|         return self.document_type._fields.get(member_name) |         return self.document_type._fields.get(member_name) | ||||||
| @@ -661,6 +757,8 @@ class GenericReferenceField(BaseField): | |||||||
|     ..note ::  Any documents used as a generic reference must be registered in the |     ..note ::  Any documents used as a generic reference must be registered in the | ||||||
|     document registry.  Importing the model will automatically register it. |     document registry.  Importing the model will automatically register it. | ||||||
|  |  | ||||||
|  |     ..note :: You can use the choices param to limit the acceptable Document types | ||||||
|  |  | ||||||
|     .. versionadded:: 0.3 |     .. versionadded:: 0.3 | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
| @@ -669,24 +767,24 @@ class GenericReferenceField(BaseField): | |||||||
|             return self |             return self | ||||||
|  |  | ||||||
|         value = instance._data.get(self.name) |         value = instance._data.get(self.name) | ||||||
|         if isinstance(value, (dict, pymongo.son.SON)): |         if isinstance(value, (dict, SON)): | ||||||
|             instance._data[self.name] = self.dereference(value) |             instance._data[self.name] = self.dereference(value) | ||||||
|  |  | ||||||
|         return super(GenericReferenceField, self).__get__(instance, owner) |         return super(GenericReferenceField, self).__get__(instance, owner) | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         if not isinstance(value, (Document, pymongo.dbref.DBRef)): |         if not isinstance(value, (Document, DBRef)): | ||||||
|             raise ValidationError('GenericReferences can only contain documents') |             self.error('GenericReferences can only contain documents') | ||||||
|  |  | ||||||
|         # We need the id from the saved object to create the DBRef |         # We need the id from the saved object to create the DBRef | ||||||
|         if isinstance(value, Document) and value.id is None: |         if isinstance(value, Document) and value.id is None: | ||||||
|             raise ValidationError('You can only reference documents once ' |             self.error('You can only reference documents once they have been' | ||||||
|                                   'they have been saved to the database') |                        ' saved to the database') | ||||||
|  |  | ||||||
|     def dereference(self, value): |     def dereference(self, value): | ||||||
|         doc_cls = get_document(value['_cls']) |         doc_cls = get_document(value['_cls']) | ||||||
|         reference = value['_ref'] |         reference = value['_ref'] | ||||||
|         doc = _get_db().dereference(reference) |         doc = doc_cls._get_db().dereference(reference) | ||||||
|         if doc is not None: |         if doc is not None: | ||||||
|             doc = doc_cls._from_son(doc) |             doc = doc_cls._from_son(doc) | ||||||
|         return doc |         return doc | ||||||
| @@ -695,6 +793,9 @@ class GenericReferenceField(BaseField): | |||||||
|         if document is None: |         if document is None: | ||||||
|             return None |             return None | ||||||
|  |  | ||||||
|  |         if isinstance(document, (dict, SON)): | ||||||
|  |             return document | ||||||
|  |  | ||||||
|         id_field_name = document.__class__._meta['id_field'] |         id_field_name = document.__class__._meta['id_field'] | ||||||
|         id_field = document.__class__._fields[id_field_name] |         id_field = document.__class__._fields[id_field_name] | ||||||
|  |  | ||||||
| @@ -702,17 +803,20 @@ class GenericReferenceField(BaseField): | |||||||
|             # We need the id from the saved object to create the DBRef |             # We need the id from the saved object to create the DBRef | ||||||
|             id_ = document.id |             id_ = document.id | ||||||
|             if id_ is None: |             if id_ is None: | ||||||
|                 raise ValidationError('You can only reference documents once ' |                 self.error('You can only reference documents once they have' | ||||||
|                                       'they have been saved to the database') |                            ' been saved to the database') | ||||||
|         else: |         else: | ||||||
|             id_ = document |             id_ = document | ||||||
|  |  | ||||||
|         id_ = id_field.to_mongo(id_) |         id_ = id_field.to_mongo(id_) | ||||||
|         collection = document._get_collection_name() |         collection = document._get_collection_name() | ||||||
|         ref = pymongo.dbref.DBRef(collection, id_) |         ref = DBRef(collection, id_) | ||||||
|         return {'_cls': document._class_name, '_ref': ref} |         return {'_cls': document._class_name, '_ref': ref} | ||||||
|  |  | ||||||
|     def prepare_query_value(self, op, value): |     def prepare_query_value(self, op, value): | ||||||
|  |         if value is None: | ||||||
|  |             return None | ||||||
|  |  | ||||||
|         return self.to_mongo(value) |         return self.to_mongo(value) | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -725,17 +829,18 @@ class BinaryField(BaseField): | |||||||
|         super(BinaryField, self).__init__(**kwargs) |         super(BinaryField, self).__init__(**kwargs) | ||||||
|  |  | ||||||
|     def to_mongo(self, value): |     def to_mongo(self, value): | ||||||
|         return pymongo.binary.Binary(value) |         return Binary(value) | ||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         # Returns str not unicode as this is binary data |         # Returns str not unicode as this is binary data | ||||||
|         return str(value) |         return str(value) | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         assert isinstance(value, str) |         if not isinstance(value, str): | ||||||
|  |             self.error('BinaryField only accepts string values') | ||||||
|  |  | ||||||
|         if self.max_bytes is not None and len(value) > self.max_bytes: |         if self.max_bytes is not None and len(value) > self.max_bytes: | ||||||
|             raise ValidationError('Binary value is too long') |             self.error('Binary value is too long') | ||||||
|  |  | ||||||
|  |  | ||||||
| class GridFSError(Exception): | class GridFSError(Exception): | ||||||
| @@ -747,17 +852,28 @@ class GridFSProxy(object): | |||||||
|  |  | ||||||
|     .. versionadded:: 0.4 |     .. versionadded:: 0.4 | ||||||
|     .. versionchanged:: 0.5 - added optional size param to read |     .. versionchanged:: 0.5 - added optional size param to read | ||||||
|  |     .. versionchanged:: 0.6 - added collection name param | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     def __init__(self, grid_id=None, key=None, instance=None): |     _fs = None | ||||||
|         self.fs = gridfs.GridFS(_get_db())  # Filesystem instance |  | ||||||
|         self.newfile = None                 # Used for partial writes |     def __init__(self, grid_id=None, key=None, | ||||||
|         self.grid_id = grid_id              # Store GridFS id for file |                  instance=None, | ||||||
|         self.gridout = None |                  db_alias=DEFAULT_CONNECTION_NAME, | ||||||
|  |                  collection_name='fs'): | ||||||
|  |         self.grid_id = grid_id                  # Store GridFS id for file | ||||||
|         self.key = key |         self.key = key | ||||||
|         self.instance = instance |         self.instance = instance | ||||||
|  |         self.db_alias = db_alias | ||||||
|  |         self.collection_name = collection_name | ||||||
|  |         self.newfile = None                     # Used for partial writes | ||||||
|  |         self.gridout = None | ||||||
|  |  | ||||||
|     def __getattr__(self, name): |     def __getattr__(self, name): | ||||||
|  |         attrs = ('_fs', 'grid_id', 'key', 'instance', 'db_alias', | ||||||
|  |                  'collection_name', 'newfile', 'gridout') | ||||||
|  |         if name in attrs: | ||||||
|  |             return self.__getattribute__(name) | ||||||
|         obj = self.get() |         obj = self.get() | ||||||
|         if name in dir(obj): |         if name in dir(obj): | ||||||
|             return getattr(obj, name) |             return getattr(obj, name) | ||||||
| @@ -769,6 +885,24 @@ class GridFSProxy(object): | |||||||
|     def __nonzero__(self): |     def __nonzero__(self): | ||||||
|         return bool(self.grid_id) |         return bool(self.grid_id) | ||||||
|  |  | ||||||
|  |     def __getstate__(self): | ||||||
|  |         self_dict = self.__dict__ | ||||||
|  |         self_dict['_fs'] = None | ||||||
|  |         return self_dict | ||||||
|  |  | ||||||
|  |     def __repr__(self): | ||||||
|  |         return '<%s: %s>' % (self.__class__.__name__, self.grid_id) | ||||||
|  |  | ||||||
|  |     def __cmp__(self, other): | ||||||
|  |         return cmp((self.grid_id, self.collection_name, self.db_alias), | ||||||
|  |                    (other.grid_id, other.collection_name, other.db_alias)) | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def fs(self): | ||||||
|  |         if not self._fs: | ||||||
|  |             self._fs = gridfs.GridFS(get_db(self.db_alias), self.collection_name) | ||||||
|  |         return self._fs | ||||||
|  |  | ||||||
|     def get(self, id=None): |     def get(self, id=None): | ||||||
|         if id: |         if id: | ||||||
|             self.grid_id = id |             self.grid_id = id | ||||||
| @@ -809,10 +943,14 @@ class GridFSProxy(object): | |||||||
|         self.newfile.writelines(lines) |         self.newfile.writelines(lines) | ||||||
|  |  | ||||||
|     def read(self, size=-1): |     def read(self, size=-1): | ||||||
|         try: |         gridout = self.get() | ||||||
|             return self.get().read(size) |         if gridout is None: | ||||||
|         except: |  | ||||||
|             return None |             return None | ||||||
|  |         else: | ||||||
|  |             try: | ||||||
|  |                 return gridout.read(size) | ||||||
|  |             except: | ||||||
|  |                 return "" | ||||||
|  |  | ||||||
|     def delete(self): |     def delete(self): | ||||||
|         # Delete file from GridFS, FileField still remains |         # Delete file from GridFS, FileField still remains | ||||||
| @@ -840,10 +978,16 @@ class FileField(BaseField): | |||||||
|  |  | ||||||
|     .. versionadded:: 0.4 |     .. versionadded:: 0.4 | ||||||
|     .. versionchanged:: 0.5 added optional size param for read |     .. versionchanged:: 0.5 added optional size param for read | ||||||
|  |     .. versionchanged:: 0.6 added db_alias for multidb support | ||||||
|     """ |     """ | ||||||
|  |     proxy_class = GridFSProxy | ||||||
|  |  | ||||||
|     def __init__(self, **kwargs): |     def __init__(self, | ||||||
|  |                  db_alias=DEFAULT_CONNECTION_NAME, | ||||||
|  |                  collection_name="fs", **kwargs): | ||||||
|         super(FileField, self).__init__(**kwargs) |         super(FileField, self).__init__(**kwargs) | ||||||
|  |         self.collection_name = collection_name | ||||||
|  |         self.db_alias = db_alias | ||||||
|  |  | ||||||
|     def __get__(self, instance, owner): |     def __get__(self, instance, owner): | ||||||
|         if instance is None: |         if instance is None: | ||||||
| @@ -851,17 +995,20 @@ class FileField(BaseField): | |||||||
|  |  | ||||||
|         # Check if a file already exists for this model |         # Check if a file already exists for this model | ||||||
|         grid_file = instance._data.get(self.name) |         grid_file = instance._data.get(self.name) | ||||||
|         self.grid_file = grid_file |         if not isinstance(grid_file, self.proxy_class): | ||||||
|         if isinstance(self.grid_file, GridFSProxy): |             grid_file = self.proxy_class(key=self.name, instance=instance, | ||||||
|             if not self.grid_file.key: |                                          db_alias=self.db_alias, | ||||||
|                 self.grid_file.key = self.name |                                          collection_name=self.collection_name) | ||||||
|                 self.grid_file.instance = instance |             instance._data[self.name] = grid_file | ||||||
|             return self.grid_file |  | ||||||
|         return GridFSProxy(key=self.name, instance=instance) |         if not grid_file.key: | ||||||
|  |             grid_file.key = self.name | ||||||
|  |             grid_file.instance = instance | ||||||
|  |         return grid_file | ||||||
|  |  | ||||||
|     def __set__(self, instance, value): |     def __set__(self, instance, value): | ||||||
|         key = self.name |         key = self.name | ||||||
|         if isinstance(value, file) or isinstance(value, str): |         if (hasattr(value, 'read') and not isinstance(value, GridFSProxy)) or isinstance(value, str): | ||||||
|             # using "FileField() = file/string" notation |             # using "FileField() = file/string" notation | ||||||
|             grid_file = instance._data.get(self.name) |             grid_file = instance._data.get(self.name) | ||||||
|             # If a file already exists, delete it |             # If a file already exists, delete it | ||||||
| @@ -874,7 +1021,8 @@ class FileField(BaseField): | |||||||
|                 grid_file.put(value) |                 grid_file.put(value) | ||||||
|             else: |             else: | ||||||
|                 # Create a new proxy object as we don't already have one |                 # Create a new proxy object as we don't already have one | ||||||
|                 instance._data[key] = GridFSProxy(key=key, instance=instance) |                 instance._data[key] = self.proxy_class(key=key, instance=instance, | ||||||
|  |                                                        collection_name=self.collection_name) | ||||||
|                 instance._data[key].put(value) |                 instance._data[key].put(value) | ||||||
|         else: |         else: | ||||||
|             instance._data[key] = value |             instance._data[key] = value | ||||||
| @@ -883,18 +1031,181 @@ class FileField(BaseField): | |||||||
|  |  | ||||||
|     def to_mongo(self, value): |     def to_mongo(self, value): | ||||||
|         # Store the GridFS file id in MongoDB |         # Store the GridFS file id in MongoDB | ||||||
|         if isinstance(value, GridFSProxy) and value.grid_id is not None: |         if isinstance(value, self.proxy_class) and value.grid_id is not None: | ||||||
|             return value.grid_id |             return value.grid_id | ||||||
|         return None |         return None | ||||||
|  |  | ||||||
|     def to_python(self, value): |     def to_python(self, value): | ||||||
|         if value is not None: |         if value is not None: | ||||||
|             return GridFSProxy(value) |             return self.proxy_class(value, | ||||||
|  |                                     collection_name=self.collection_name, | ||||||
|  |                                     db_alias=self.db_alias) | ||||||
|  |  | ||||||
|     def validate(self, value): |     def validate(self, value): | ||||||
|         if value.grid_id is not None: |         if value.grid_id is not None: | ||||||
|             assert isinstance(value, GridFSProxy) |             if not isinstance(value, self.proxy_class): | ||||||
|             assert isinstance(value.grid_id, pymongo.objectid.ObjectId) |                 self.error('FileField only accepts GridFSProxy values') | ||||||
|  |             if not isinstance(value.grid_id, ObjectId): | ||||||
|  |                 self.error('Invalid GridFSProxy value') | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ImageGridFsProxy(GridFSProxy): | ||||||
|  |     """ | ||||||
|  |     Proxy for ImageField | ||||||
|  |  | ||||||
|  |     versionadded: 0.6 | ||||||
|  |     """ | ||||||
|  |     def put(self, file_obj, **kwargs): | ||||||
|  |         """ | ||||||
|  |         Insert a image in database | ||||||
|  |         applying field properties (size, thumbnail_size) | ||||||
|  |         """ | ||||||
|  |         field = self.instance._fields[self.key] | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             img = Image.open(file_obj) | ||||||
|  |         except: | ||||||
|  |             raise ValidationError('Invalid image') | ||||||
|  |  | ||||||
|  |         if (field.size and (img.size[0] > field.size['width'] or | ||||||
|  |                             img.size[1] > field.size['height'])): | ||||||
|  |             size = field.size | ||||||
|  |  | ||||||
|  |             if size['force']: | ||||||
|  |                 img = ImageOps.fit(img, | ||||||
|  |                                    (size['width'], | ||||||
|  |                                     size['height']), | ||||||
|  |                                    Image.ANTIALIAS) | ||||||
|  |             else: | ||||||
|  |                 img.thumbnail((size['width'], | ||||||
|  |                                size['height']), | ||||||
|  |                               Image.ANTIALIAS) | ||||||
|  |  | ||||||
|  |         thumbnail = None | ||||||
|  |         if field.thumbnail_size: | ||||||
|  |             size = field.thumbnail_size | ||||||
|  |  | ||||||
|  |             if size['force']: | ||||||
|  |                 thumbnail = ImageOps.fit(img, | ||||||
|  |                                    (size['width'], | ||||||
|  |                                     size['height']), | ||||||
|  |                                    Image.ANTIALIAS) | ||||||
|  |             else: | ||||||
|  |                 thumbnail = img.copy() | ||||||
|  |                 thumbnail.thumbnail((size['width'], | ||||||
|  |                                      size['height']), | ||||||
|  |                                     Image.ANTIALIAS) | ||||||
|  |  | ||||||
|  |         if thumbnail: | ||||||
|  |             thumb_id = self._put_thumbnail(thumbnail, | ||||||
|  |                                           img.format) | ||||||
|  |         else: | ||||||
|  |             thumb_id = None | ||||||
|  |  | ||||||
|  |         w, h = img.size | ||||||
|  |  | ||||||
|  |         io = StringIO() | ||||||
|  |         img.save(io, img.format) | ||||||
|  |         io.seek(0) | ||||||
|  |  | ||||||
|  |         return super(ImageGridFsProxy, self).put(io, | ||||||
|  |                                                  width=w, | ||||||
|  |                                                  height=h, | ||||||
|  |                                                  format=img.format, | ||||||
|  |                                                  thumbnail_id=thumb_id, | ||||||
|  |                                                  **kwargs) | ||||||
|  |  | ||||||
|  |     def delete(self, *args, **kwargs): | ||||||
|  |         #deletes thumbnail | ||||||
|  |         out = self.get() | ||||||
|  |         if out and out.thumbnail_id: | ||||||
|  |             self.fs.delete(out.thumbnail_id) | ||||||
|  |  | ||||||
|  |         return super(ImageGridFsProxy, self).delete(*args, **kwargs) | ||||||
|  |  | ||||||
|  |     def _put_thumbnail(self, thumbnail, format, **kwargs): | ||||||
|  |         w, h = thumbnail.size | ||||||
|  |  | ||||||
|  |         io = StringIO() | ||||||
|  |         thumbnail.save(io, format) | ||||||
|  |         io.seek(0) | ||||||
|  |  | ||||||
|  |         return self.fs.put(io, width=w, | ||||||
|  |                            height=h, | ||||||
|  |                            format=format, | ||||||
|  |                            **kwargs) | ||||||
|  |     @property | ||||||
|  |     def size(self): | ||||||
|  |         """ | ||||||
|  |         return a width, height of image | ||||||
|  |         """ | ||||||
|  |         out = self.get() | ||||||
|  |         if out: | ||||||
|  |             return out.width, out.height | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def format(self): | ||||||
|  |         """ | ||||||
|  |         return format of image | ||||||
|  |         ex: PNG, JPEG, GIF, etc | ||||||
|  |         """ | ||||||
|  |         out = self.get() | ||||||
|  |         if out: | ||||||
|  |             return out.format | ||||||
|  |  | ||||||
|  |     @property | ||||||
|  |     def thumbnail(self): | ||||||
|  |         """ | ||||||
|  |         return a gridfs.grid_file.GridOut | ||||||
|  |         representing a thumbnail of Image | ||||||
|  |         """ | ||||||
|  |         out = self.get() | ||||||
|  |         if out and out.thumbnail_id: | ||||||
|  |             return self.fs.get(out.thumbnail_id) | ||||||
|  |  | ||||||
|  |     def write(self, *args, **kwargs): | ||||||
|  |         raise RuntimeError("Please use \"put\" method instead") | ||||||
|  |  | ||||||
|  |     def writelines(self, *args, **kwargs): | ||||||
|  |         raise RuntimeError("Please use \"put\" method instead") | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ImproperlyConfigured(Exception): | ||||||
|  |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ImageField(FileField): | ||||||
|  |     """ | ||||||
|  |     A Image File storage field. | ||||||
|  |  | ||||||
|  |     @size (width, height, force): | ||||||
|  |         max size to store images, if larger will be automatically resized | ||||||
|  |         ex: size=(800, 600, True) | ||||||
|  |  | ||||||
|  |     @thumbnail (width, height, force): | ||||||
|  |         size to generate a thumbnail | ||||||
|  |  | ||||||
|  |     .. versionadded:: 0.6 | ||||||
|  |     """ | ||||||
|  |     proxy_class = ImageGridFsProxy | ||||||
|  |  | ||||||
|  |     def __init__(self, size=None, thumbnail_size=None, | ||||||
|  |                  collection_name='images', **kwargs): | ||||||
|  |         if not Image: | ||||||
|  |             raise ImproperlyConfigured("PIL library was not found") | ||||||
|  |  | ||||||
|  |         params_size = ('width', 'height', 'force') | ||||||
|  |         extra_args = dict(size=size, thumbnail_size=thumbnail_size) | ||||||
|  |         for att_name, att in extra_args.items(): | ||||||
|  |             if att and (isinstance(att, tuple) or isinstance(att, list)): | ||||||
|  |                 setattr(self, att_name, dict( | ||||||
|  |                         map(None, params_size, att))) | ||||||
|  |             else: | ||||||
|  |                 setattr(self, att_name, None) | ||||||
|  |  | ||||||
|  |         super(ImageField, self).__init__( | ||||||
|  |             collection_name=collection_name, | ||||||
|  |             **kwargs) | ||||||
|  |  | ||||||
|  |  | ||||||
| class GeoPointField(BaseField): | class GeoPointField(BaseField): | ||||||
| @@ -909,14 +1220,14 @@ class GeoPointField(BaseField): | |||||||
|         """Make sure that a geo-value is of type (x, y) |         """Make sure that a geo-value is of type (x, y) | ||||||
|         """ |         """ | ||||||
|         if not isinstance(value, (list, tuple)): |         if not isinstance(value, (list, tuple)): | ||||||
|             raise ValidationError('GeoPointField can only accept tuples or ' |             self.error('GeoPointField can only accept tuples or lists ' | ||||||
|                                   'lists of (x, y)') |                        'of (x, y)') | ||||||
|  |  | ||||||
|         if not len(value) == 2: |         if not len(value) == 2: | ||||||
|             raise ValidationError('Value must be a two-dimensional point.') |             self.error('Value must be a two-dimensional point') | ||||||
|         if (not isinstance(value[0], (float, int)) and |         if (not isinstance(value[0], (float, int)) and | ||||||
|             not isinstance(value[1], (float, int))): |             not isinstance(value[1], (float, int))): | ||||||
|             raise ValidationError('Both values in point must be float or int.') |             self.error('Both values in point must be float or int') | ||||||
|  |  | ||||||
|  |  | ||||||
| class SequenceField(IntField): | class SequenceField(IntField): | ||||||
| @@ -932,8 +1243,9 @@ class SequenceField(IntField): | |||||||
|  |  | ||||||
|     .. versionadded:: 0.5 |     .. versionadded:: 0.5 | ||||||
|     """ |     """ | ||||||
|     def __init__(self, collection_name=None, *args, **kwargs): |     def __init__(self, collection_name=None, db_alias = None, *args, **kwargs): | ||||||
|         self.collection_name = collection_name or 'mongoengine.counters' |         self.collection_name = collection_name or 'mongoengine.counters' | ||||||
|  |         self.db_alias = db_alias or DEFAULT_CONNECTION_NAME | ||||||
|         return super(SequenceField, self).__init__(*args, **kwargs) |         return super(SequenceField, self).__init__(*args, **kwargs) | ||||||
|  |  | ||||||
|     def generate_new_value(self): |     def generate_new_value(self): | ||||||
| @@ -942,7 +1254,7 @@ class SequenceField(IntField): | |||||||
|         """ |         """ | ||||||
|         sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(), |         sequence_id = "{0}.{1}".format(self.owner_document._get_collection_name(), | ||||||
|                                        self.name) |                                        self.name) | ||||||
|         collection = _get_db()[self.collection_name] |         collection = get_db(alias = self.db_alias )[self.collection_name] | ||||||
|         counter = collection.find_and_modify(query={"_id": sequence_id}, |         counter = collection.find_and_modify(query={"_id": sequence_id}, | ||||||
|                                              update={"$inc": {"next": 1}}, |                                              update={"$inc": {"next": 1}}, | ||||||
|                                              new=True, |                                              new=True, | ||||||
| @@ -977,3 +1289,30 @@ class SequenceField(IntField): | |||||||
|         if value is None: |         if value is None: | ||||||
|             value = self.generate_new_value() |             value = self.generate_new_value() | ||||||
|         return value |         return value | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class UUIDField(BaseField): | ||||||
|  |     """A UUID field. | ||||||
|  |  | ||||||
|  |     .. versionadded:: 0.6 | ||||||
|  |     """ | ||||||
|  |  | ||||||
|  |     def __init__(self, **kwargs): | ||||||
|  |         super(UUIDField, self).__init__(**kwargs) | ||||||
|  |  | ||||||
|  |     def to_python(self, value): | ||||||
|  |         if not isinstance(value, basestring): | ||||||
|  |             value = unicode(value) | ||||||
|  |         return uuid.UUID(value) | ||||||
|  |  | ||||||
|  |     def to_mongo(self, value): | ||||||
|  |         return unicode(value) | ||||||
|  |  | ||||||
|  |     def validate(self, value): | ||||||
|  |         if not isinstance(value, uuid.UUID): | ||||||
|  |             if not isinstance(value, basestring): | ||||||
|  |                 value = str(value) | ||||||
|  |             try: | ||||||
|  |                 value = uuid.UUID(value) | ||||||
|  |             except Exception, exc: | ||||||
|  |                 self.error('Could not convert to UUID: %s' % exc) | ||||||
|   | |||||||
| @@ -1,17 +1,16 @@ | |||||||
| from connection import _get_db |  | ||||||
|  |  | ||||||
| import pprint | import pprint | ||||||
| import pymongo |  | ||||||
| import pymongo.code |  | ||||||
| import pymongo.dbref |  | ||||||
| import pymongo.objectid |  | ||||||
| import re | import re | ||||||
| import copy | import copy | ||||||
| import itertools | import itertools | ||||||
| import operator | import operator | ||||||
|  |  | ||||||
|  | import pymongo | ||||||
|  | from bson.code import Code | ||||||
|  |  | ||||||
|  | from mongoengine import signals | ||||||
|  |  | ||||||
| __all__ = ['queryset_manager', 'Q', 'InvalidQueryError', | __all__ = ['queryset_manager', 'Q', 'InvalidQueryError', | ||||||
|            'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY'] |            'DO_NOTHING', 'NULLIFY', 'CASCADE', 'DENY', 'PULL'] | ||||||
|  |  | ||||||
|  |  | ||||||
| # The maximum number of items to display in a QuerySet.__repr__ | # The maximum number of items to display in a QuerySet.__repr__ | ||||||
| @@ -22,6 +21,7 @@ DO_NOTHING = 0 | |||||||
| NULLIFY = 1 | NULLIFY = 1 | ||||||
| CASCADE = 2 | CASCADE = 2 | ||||||
| DENY = 3 | DENY = 3 | ||||||
|  | PULL = 4 | ||||||
|  |  | ||||||
|  |  | ||||||
| class DoesNotExist(Exception): | class DoesNotExist(Exception): | ||||||
| @@ -274,16 +274,20 @@ class Q(QNode): | |||||||
|  |  | ||||||
| class QueryFieldList(object): | class QueryFieldList(object): | ||||||
|     """Object that handles combinations of .only() and .exclude() calls""" |     """Object that handles combinations of .only() and .exclude() calls""" | ||||||
|     ONLY = True |     ONLY = 1 | ||||||
|     EXCLUDE = False |     EXCLUDE = 0 | ||||||
|  |  | ||||||
|     def __init__(self, fields=[], value=ONLY, always_include=[]): |     def __init__(self, fields=[], value=ONLY, always_include=[]): | ||||||
|         self.value = value |         self.value = value | ||||||
|         self.fields = set(fields) |         self.fields = set(fields) | ||||||
|         self.always_include = set(always_include) |         self.always_include = set(always_include) | ||||||
|  |         self._id = None | ||||||
|  |  | ||||||
|     def as_dict(self): |     def as_dict(self): | ||||||
|         return dict((field, self.value) for field in self.fields) |         field_list = dict((field, self.value) for field in self.fields) | ||||||
|  |         if self._id is not None: | ||||||
|  |             field_list['_id'] = self._id | ||||||
|  |         return field_list | ||||||
|  |  | ||||||
|     def __add__(self, f): |     def __add__(self, f): | ||||||
|         if not self.fields: |         if not self.fields: | ||||||
| @@ -299,6 +303,9 @@ class QueryFieldList(object): | |||||||
|             self.value = self.ONLY |             self.value = self.ONLY | ||||||
|             self.fields = f.fields - self.fields |             self.fields = f.fields - self.fields | ||||||
|  |  | ||||||
|  |         if '_id' in f.fields: | ||||||
|  |             self._id = f.value | ||||||
|  |  | ||||||
|         if self.always_include: |         if self.always_include: | ||||||
|             if self.value is self.ONLY and self.fields: |             if self.value is self.ONLY and self.fields: | ||||||
|                 self.fields = self.fields.union(self.always_include) |                 self.fields = self.fields.union(self.always_include) | ||||||
| @@ -334,6 +341,7 @@ class QuerySet(object): | |||||||
|         self._timeout = True |         self._timeout = True | ||||||
|         self._class_check = True |         self._class_check = True | ||||||
|         self._slave_okay = False |         self._slave_okay = False | ||||||
|  |         self._scalar = [] | ||||||
|  |  | ||||||
|         # If inheritance is allowed, only return instances and instances of |         # If inheritance is allowed, only return instances and instances of | ||||||
|         # subclasses of the class being used |         # subclasses of the class being used | ||||||
| @@ -387,57 +395,6 @@ class QuerySet(object): | |||||||
|             unique=index_spec.get('unique', False)) |             unique=index_spec.get('unique', False)) | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def _build_index_spec(cls, doc_cls, spec): |  | ||||||
|         """Build a PyMongo index spec from a MongoEngine index spec. |  | ||||||
|         """ |  | ||||||
|         if isinstance(spec, basestring): |  | ||||||
|             spec = {'fields': [spec]} |  | ||||||
|         if isinstance(spec, (list, tuple)): |  | ||||||
|             spec = {'fields': spec} |  | ||||||
|  |  | ||||||
|         index_list = [] |  | ||||||
|         use_types = doc_cls._meta.get('allow_inheritance', True) |  | ||||||
|         for key in spec['fields']: |  | ||||||
|             # Get direction from + or - |  | ||||||
|             direction = pymongo.ASCENDING |  | ||||||
|             if key.startswith("-"): |  | ||||||
|                 direction = pymongo.DESCENDING |  | ||||||
|             if key.startswith(("+", "-")): |  | ||||||
|                     key = key[1:] |  | ||||||
|  |  | ||||||
|             # Use real field name, do it manually because we need field |  | ||||||
|             # objects for the next part (list field checking) |  | ||||||
|             parts = key.split('.') |  | ||||||
|             fields = QuerySet._lookup_field(doc_cls, parts) |  | ||||||
|             parts = [field.db_field for field in fields] |  | ||||||
|             key = '.'.join(parts) |  | ||||||
|             index_list.append((key, direction)) |  | ||||||
|  |  | ||||||
|             # Check if a list field is being used, don't use _types if it is |  | ||||||
|             if use_types and not all(f._index_with_types for f in fields): |  | ||||||
|                 use_types = False |  | ||||||
|  |  | ||||||
|         # If _types is being used, prepend it to every specified index |  | ||||||
|         index_types = doc_cls._meta.get('index_types', True) |  | ||||||
|         allow_inheritance = doc_cls._meta.get('allow_inheritance') |  | ||||||
|         if spec.get('types', index_types) and allow_inheritance and use_types: |  | ||||||
|             index_list.insert(0, ('_types', 1)) |  | ||||||
|  |  | ||||||
|         spec['fields'] = index_list |  | ||||||
|  |  | ||||||
|         if spec.get('sparse', False) and len(spec['fields']) > 1: |  | ||||||
|             raise ValueError( |  | ||||||
|                 'Sparse indexes can only have one field in them. ' |  | ||||||
|                 'See https://jira.mongodb.org/browse/SERVER-2193') |  | ||||||
|  |  | ||||||
|         return spec |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def _reset_already_indexed(cls): |  | ||||||
|         """Helper to reset already indexed, can be useful for testing purposes""" |  | ||||||
|         cls.__already_indexed = set() |  | ||||||
|  |  | ||||||
|     def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query): |     def __call__(self, q_obj=None, class_check=True, slave_okay=False, **query): | ||||||
|         """Filter the selected documents by calling the |         """Filter the selected documents by calling the | ||||||
|         :class:`~mongoengine.queryset.QuerySet` with a query. |         :class:`~mongoengine.queryset.QuerySet` with a query. | ||||||
| @@ -470,60 +427,138 @@ class QuerySet(object): | |||||||
|         """Returns all documents.""" |         """Returns all documents.""" | ||||||
|         return self.__call__() |         return self.__call__() | ||||||
|  |  | ||||||
|  |     def _ensure_indexes(self): | ||||||
|  |         """Checks the document meta data and ensures all the indexes exist. | ||||||
|  |  | ||||||
|  |         .. note:: You can disable automatic index creation by setting | ||||||
|  |                   `auto_create_index` to False in the documents meta data | ||||||
|  |         """ | ||||||
|  |         background = self._document._meta.get('index_background', False) | ||||||
|  |         drop_dups = self._document._meta.get('index_drop_dups', False) | ||||||
|  |         index_opts = self._document._meta.get('index_opts', {}) | ||||||
|  |         index_types = self._document._meta.get('index_types', True) | ||||||
|  |  | ||||||
|  |         # determine if an index which we are creating includes | ||||||
|  |         # _type as its first field; if so, we can avoid creating | ||||||
|  |         # an extra index on _type, as mongodb will use the existing | ||||||
|  |         # index to service queries against _type | ||||||
|  |         types_indexed = False | ||||||
|  |         def includes_types(fields): | ||||||
|  |             first_field = None | ||||||
|  |             if len(fields): | ||||||
|  |                 if isinstance(fields[0], basestring): | ||||||
|  |                     first_field = fields[0] | ||||||
|  |                 elif isinstance(fields[0], (list, tuple)) and len(fields[0]): | ||||||
|  |                     first_field = fields[0][0] | ||||||
|  |             return first_field == '_types' | ||||||
|  |  | ||||||
|  |         # Ensure indexes created by uniqueness constraints | ||||||
|  |         for index in self._document._meta['unique_indexes']: | ||||||
|  |             types_indexed = types_indexed or includes_types(index) | ||||||
|  |             self._collection.ensure_index(index, unique=True, | ||||||
|  |                 background=background, drop_dups=drop_dups, **index_opts) | ||||||
|  |  | ||||||
|  |         # Ensure document-defined indexes are created | ||||||
|  |         if self._document._meta['indexes']: | ||||||
|  |             for spec in self._document._meta['indexes']: | ||||||
|  |                 types_indexed = types_indexed or includes_types(spec['fields']) | ||||||
|  |                 opts = index_opts.copy() | ||||||
|  |                 opts['unique'] = spec.get('unique', False) | ||||||
|  |                 opts['sparse'] = spec.get('sparse', False) | ||||||
|  |                 self._collection.ensure_index(spec['fields'], | ||||||
|  |                     background=background, **opts) | ||||||
|  |  | ||||||
|  |         # If _types is being used (for polymorphism), it needs an index, | ||||||
|  |         # only if another index doesn't begin with _types | ||||||
|  |         if index_types and '_types' in self._query and not types_indexed: | ||||||
|  |             self._collection.ensure_index('_types', | ||||||
|  |                 background=background, **index_opts) | ||||||
|  |  | ||||||
|  |         # Add geo indicies | ||||||
|  |         for field in self._document._geo_indices(): | ||||||
|  |             index_spec = [(field.db_field, pymongo.GEO2D)] | ||||||
|  |             self._collection.ensure_index(index_spec, | ||||||
|  |                 background=background, **index_opts) | ||||||
|  |  | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def _build_index_spec(cls, doc_cls, spec): | ||||||
|  |         """Build a PyMongo index spec from a MongoEngine index spec. | ||||||
|  |         """ | ||||||
|  |         if isinstance(spec, basestring): | ||||||
|  |             spec = {'fields': [spec]} | ||||||
|  |         if isinstance(spec, (list, tuple)): | ||||||
|  |             spec = {'fields': spec} | ||||||
|  |  | ||||||
|  |         index_list = [] | ||||||
|  |         use_types = doc_cls._meta.get('allow_inheritance', True) | ||||||
|  |         for key in spec['fields']: | ||||||
|  |             # Get ASCENDING direction from +, DESCENDING from -, and GEO2D from * | ||||||
|  |             direction = pymongo.ASCENDING | ||||||
|  |             if key.startswith("-"): | ||||||
|  |                 direction = pymongo.DESCENDING | ||||||
|  |             elif key.startswith("*"): | ||||||
|  |                 direction = pymongo.GEO2D | ||||||
|  |             if key.startswith(("+", "-", "*")): | ||||||
|  |                 key = key[1:] | ||||||
|  |  | ||||||
|  |             # Use real field name, do it manually because we need field | ||||||
|  |             # objects for the next part (list field checking) | ||||||
|  |             parts = key.split('.') | ||||||
|  |             if parts in (['pk'], ['id'], ['_id']): | ||||||
|  |                 key = '_id' | ||||||
|  |             else: | ||||||
|  |                 fields = QuerySet._lookup_field(doc_cls, parts) | ||||||
|  |                 parts = [field if field == '_id' else field.db_field for field in fields] | ||||||
|  |                 key = '.'.join(parts) | ||||||
|  |             index_list.append((key, direction)) | ||||||
|  |  | ||||||
|  |             # If sparse - dont include types | ||||||
|  |             if spec.get('sparse', False): | ||||||
|  |                 use_types = False | ||||||
|  |  | ||||||
|  |             # Check if a list field is being used, don't use _types if it is | ||||||
|  |             if use_types and not all(f._index_with_types for f in fields): | ||||||
|  |                 use_types = False | ||||||
|  |  | ||||||
|  |         # If _types is being used, prepend it to every specified index | ||||||
|  |         index_types = doc_cls._meta.get('index_types', True) | ||||||
|  |         allow_inheritance = doc_cls._meta.get('allow_inheritance') | ||||||
|  |         if spec.get('types', index_types) and allow_inheritance and use_types and direction is not pymongo.GEO2D: | ||||||
|  |             index_list.insert(0, ('_types', 1)) | ||||||
|  |  | ||||||
|  |         spec['fields'] = index_list | ||||||
|  |         if spec.get('sparse', False) and len(spec['fields']) > 1: | ||||||
|  |             raise ValueError( | ||||||
|  |                 'Sparse indexes can only have one field in them. ' | ||||||
|  |                 'See https://jira.mongodb.org/browse/SERVER-2193') | ||||||
|  |  | ||||||
|  |         return spec | ||||||
|  |  | ||||||
|  |     @classmethod | ||||||
|  |     def _reset_already_indexed(cls, document=None): | ||||||
|  |         """Helper to reset already indexed, can be useful for testing purposes""" | ||||||
|  |         if document: | ||||||
|  |             cls.__already_indexed.discard(document) | ||||||
|  |         cls.__already_indexed.clear() | ||||||
|  |  | ||||||
|  |  | ||||||
|     @property |     @property | ||||||
|     def _collection(self): |     def _collection(self): | ||||||
|         """Property that returns the collection object. This allows us to |         """Property that returns the collection object. This allows us to | ||||||
|         perform operations only if the collection is accessed. |         perform operations only if the collection is accessed. | ||||||
|         """ |         """ | ||||||
|         if self._document not in QuerySet.__already_indexed: |         if self._document not in QuerySet.__already_indexed: | ||||||
|  |             # Ensure collection exists | ||||||
|  |             db = self._document._get_db() | ||||||
|  |             if self._collection_obj.name not in db.collection_names(): | ||||||
|  |                 self._document._collection = None | ||||||
|  |                 self._collection_obj = self._document._get_collection() | ||||||
|  |  | ||||||
|             QuerySet.__already_indexed.add(self._document) |             QuerySet.__already_indexed.add(self._document) | ||||||
|  |  | ||||||
|             background = self._document._meta.get('index_background', False) |             if self._document._meta.get('auto_create_index', True): | ||||||
|             drop_dups = self._document._meta.get('index_drop_dups', False) |                 self._ensure_indexes() | ||||||
|             index_opts = self._document._meta.get('index_options', {}) |  | ||||||
|             index_types = self._document._meta.get('index_types', True) |  | ||||||
|  |  | ||||||
|             # determine if an index which we are creating includes |  | ||||||
|             # _type as its first field; if so, we can avoid creating |  | ||||||
|             # an extra index on _type, as mongodb will use the existing |  | ||||||
|             # index to service queries against _type |  | ||||||
|             types_indexed = False |  | ||||||
|             def includes_types(fields): |  | ||||||
|                 first_field = None |  | ||||||
|                 if len(fields): |  | ||||||
|                     if isinstance(fields[0], basestring): |  | ||||||
|                         first_field = fields[0] |  | ||||||
|                     elif isinstance(fields[0], (list, tuple)) and len(fields[0]): |  | ||||||
|                         first_field = fields[0][0] |  | ||||||
|                 return first_field == '_types' |  | ||||||
|  |  | ||||||
|             # Ensure indexes created by uniqueness constraints |  | ||||||
|             for index in self._document._meta['unique_indexes']: |  | ||||||
|                 types_indexed = types_indexed or includes_types(index) |  | ||||||
|                 self._collection.ensure_index(index, unique=True, |  | ||||||
|                     background=background, drop_dups=drop_dups, **index_opts) |  | ||||||
|  |  | ||||||
|             # Ensure document-defined indexes are created |  | ||||||
|             if self._document._meta['indexes']: |  | ||||||
|                 for spec in self._document._meta['indexes']: |  | ||||||
|                     types_indexed = types_indexed or includes_types(spec['fields']) |  | ||||||
|                     opts = index_opts.copy() |  | ||||||
|                     opts['unique'] = spec.get('unique', False) |  | ||||||
|                     opts['sparse'] = spec.get('sparse', False) |  | ||||||
|                     self._collection.ensure_index(spec['fields'], |  | ||||||
|                         background=background, **opts) |  | ||||||
|  |  | ||||||
|             # If _types is being used (for polymorphism), it needs an index, |  | ||||||
|             # only if another index doesn't begin with _types |  | ||||||
|             if index_types and '_types' in self._query and not types_indexed: |  | ||||||
|                 self._collection.ensure_index('_types', |  | ||||||
|                     background=background, **index_opts) |  | ||||||
|  |  | ||||||
|             # Add geo indicies |  | ||||||
|             for field in self._document._geo_indices(): |  | ||||||
|                 index_spec = [(field.db_field, pymongo.GEO2D)] |  | ||||||
|                 self._collection.ensure_index(index_spec, |  | ||||||
|                     background=background, **index_opts) |  | ||||||
|  |  | ||||||
|         return self._collection_obj |         return self._collection_obj | ||||||
|  |  | ||||||
| @@ -555,7 +590,7 @@ class QuerySet(object): | |||||||
|                 self.order_by(*self._document._meta['ordering']) |                 self.order_by(*self._document._meta['ordering']) | ||||||
|  |  | ||||||
|             if self._limit is not None: |             if self._limit is not None: | ||||||
|                 self._cursor_obj.limit(self._limit) |                 self._cursor_obj.limit(self._limit - (self._skip or 0)) | ||||||
|  |  | ||||||
|             if self._skip is not None: |             if self._skip is not None: | ||||||
|                 self._cursor_obj.skip(self._skip) |                 self._cursor_obj.skip(self._skip) | ||||||
| @@ -585,15 +620,29 @@ class QuerySet(object): | |||||||
|                         "Can't use index on unsubscriptable field (%s)" % err) |                         "Can't use index on unsubscriptable field (%s)" % err) | ||||||
|                 fields.append(field_name) |                 fields.append(field_name) | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             if field is None: |             if field is None: | ||||||
|                 # Look up first field from the document |                 # Look up first field from the document | ||||||
|                 if field_name == 'pk': |                 if field_name == 'pk': | ||||||
|                     # Deal with "primary key" alias |                     # Deal with "primary key" alias | ||||||
|                     field_name = document._meta['id_field'] |                     field_name = document._meta['id_field'] | ||||||
|                 field = document._fields[field_name] |                 if field_name in document._fields: | ||||||
|  |                     field = document._fields[field_name] | ||||||
|  |                 elif document._dynamic: | ||||||
|  |                     from fields import DynamicField | ||||||
|  |                     field = DynamicField(db_field=field_name) | ||||||
|  |                 else: | ||||||
|  |                     raise InvalidQueryError('Cannot resolve field "%s"' | ||||||
|  |                                                 % field_name) | ||||||
|             else: |             else: | ||||||
|                 # Look up subfield on the previous field |                 from mongoengine.fields import ReferenceField, GenericReferenceField | ||||||
|                 new_field = field.lookup_member(field_name) |                 if isinstance(field, (ReferenceField, GenericReferenceField)): | ||||||
|  |                     raise InvalidQueryError('Cannot perform join in mongoDB: %s' % '__'.join(parts)) | ||||||
|  |                 if getattr(field, 'field', None): | ||||||
|  |                     new_field = field.field.lookup_member(field_name) | ||||||
|  |                 else: | ||||||
|  |                    # Look up subfield on the previous field | ||||||
|  |                     new_field = field.lookup_member(field_name) | ||||||
|                 from base import ComplexBaseField |                 from base import ComplexBaseField | ||||||
|                 if not new_field and isinstance(field, ComplexBaseField): |                 if not new_field and isinstance(field, ComplexBaseField): | ||||||
|                     fields.append(field_name) |                     fields.append(field_name) | ||||||
| @@ -603,7 +652,6 @@ class QuerySet(object): | |||||||
|                                                 % field_name) |                                                 % field_name) | ||||||
|                 field = new_field  # update field to the new field type |                 field = new_field  # update field to the new field type | ||||||
|             fields.append(field) |             fields.append(field) | ||||||
|  |  | ||||||
|         return fields |         return fields | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
| @@ -624,6 +672,7 @@ class QuerySet(object): | |||||||
|         match_operators = ['contains', 'icontains', 'startswith', |         match_operators = ['contains', 'icontains', 'startswith', | ||||||
|                            'istartswith', 'endswith', 'iendswith', |                            'istartswith', 'endswith', 'iendswith', | ||||||
|                            'exact', 'iexact'] |                            'exact', 'iexact'] | ||||||
|  |         custom_operators = ['match'] | ||||||
|  |  | ||||||
|         mongo_query = {} |         mongo_query = {} | ||||||
|         for key, value in query.items(): |         for key, value in query.items(): | ||||||
| @@ -636,7 +685,7 @@ class QuerySet(object): | |||||||
|             parts = [part for part in parts if not part.isdigit()] |             parts = [part for part in parts if not part.isdigit()] | ||||||
|             # Check for an operator and transform to mongo-style if there is |             # Check for an operator and transform to mongo-style if there is | ||||||
|             op = None |             op = None | ||||||
|             if parts[-1] in operators + match_operators + geo_operators: |             if parts[-1] in operators + match_operators + geo_operators + custom_operators: | ||||||
|                 op = parts.pop() |                 op = parts.pop() | ||||||
|  |  | ||||||
|             negate = False |             negate = False | ||||||
| @@ -650,8 +699,8 @@ class QuerySet(object): | |||||||
|                 parts = [] |                 parts = [] | ||||||
|  |  | ||||||
|                 cleaned_fields = [] |                 cleaned_fields = [] | ||||||
|                 append_field = True |  | ||||||
|                 for field in fields: |                 for field in fields: | ||||||
|  |                     append_field = True | ||||||
|                     if isinstance(field, str): |                     if isinstance(field, str): | ||||||
|                         parts.append(field) |                         parts.append(field) | ||||||
|                         append_field = False |                         append_field = False | ||||||
| @@ -669,7 +718,7 @@ class QuerySet(object): | |||||||
|                     if isinstance(field, basestring): |                     if isinstance(field, basestring): | ||||||
|                         if op in match_operators and isinstance(value, basestring): |                         if op in match_operators and isinstance(value, basestring): | ||||||
|                             from mongoengine import StringField |                             from mongoengine import StringField | ||||||
|                             value = StringField().prepare_query_value(op, value) |                             value = StringField.prepare_query_value(op, value) | ||||||
|                         else: |                         else: | ||||||
|                             value = field |                             value = field | ||||||
|                     else: |                     else: | ||||||
| @@ -696,6 +745,12 @@ class QuerySet(object): | |||||||
|                     else: |                     else: | ||||||
|                         raise NotImplementedError("Geo method '%s' has not " |                         raise NotImplementedError("Geo method '%s' has not " | ||||||
|                                                   "been implemented" % op) |                                                   "been implemented" % op) | ||||||
|  |                 elif op in custom_operators: | ||||||
|  |                     if op == 'match': | ||||||
|  |                         value = {"$elemMatch": value} | ||||||
|  |                     else: | ||||||
|  |                         NotImplementedError("Custom method '%s' has not " | ||||||
|  |                                             "been implemented" % op) | ||||||
|                 elif op not in match_operators: |                 elif op not in match_operators: | ||||||
|                     value = {'$' + op: value} |                     value = {'$' + op: value} | ||||||
|  |  | ||||||
| @@ -721,18 +776,23 @@ class QuerySet(object): | |||||||
|  |  | ||||||
|         .. versionadded:: 0.3 |         .. versionadded:: 0.3 | ||||||
|         """ |         """ | ||||||
|  |         self.limit(2) | ||||||
|         self.__call__(*q_objs, **query) |         self.__call__(*q_objs, **query) | ||||||
|         count = self.count() |         try: | ||||||
|         if count == 1: |             result1 = self.next() | ||||||
|             return self[0] |         except StopIteration: | ||||||
|         elif count > 1: |  | ||||||
|             message = u'%d items returned, instead of 1' % count |  | ||||||
|             raise self._document.MultipleObjectsReturned(message) |  | ||||||
|         else: |  | ||||||
|             raise self._document.DoesNotExist("%s matching query does not exist." |             raise self._document.DoesNotExist("%s matching query does not exist." | ||||||
|                                               % self._document._class_name) |                                               % self._document._class_name) | ||||||
|  |         try: | ||||||
|  |             result2 = self.next() | ||||||
|  |         except StopIteration: | ||||||
|  |             return result1 | ||||||
|  |  | ||||||
|     def get_or_create(self, write_options=None, *q_objs, **query): |         self.rewind() | ||||||
|  |         message = u'%d items returned, instead of 1' % self.count() | ||||||
|  |         raise self._document.MultipleObjectsReturned(message) | ||||||
|  |  | ||||||
|  |     def get_or_create(self, write_options=None, auto_save=True, *q_objs, **query): | ||||||
|         """Retrieve unique object or create, if it doesn't exist. Returns a tuple of |         """Retrieve unique object or create, if it doesn't exist. Returns a tuple of | ||||||
|         ``(object, created)``, where ``object`` is the retrieved or created object |         ``(object, created)``, where ``object`` is the retrieved or created object | ||||||
|         and ``created`` is a boolean specifying whether a new object was created. Raises |         and ``created`` is a boolean specifying whether a new object was created. Raises | ||||||
| @@ -742,28 +802,35 @@ class QuerySet(object): | |||||||
|         dictionary of default values for the new document may be provided as a |         dictionary of default values for the new document may be provided as a | ||||||
|         keyword argument called :attr:`defaults`. |         keyword argument called :attr:`defaults`. | ||||||
|  |  | ||||||
|  |         .. note:: This requires two separate operations and therefore a | ||||||
|  |         race condition exists.  Because there are no transactions in mongoDB | ||||||
|  |         other approaches should be investigated, to ensure you don't | ||||||
|  |         accidently duplicate data when using this method. | ||||||
|  |  | ||||||
|         :param write_options: optional extra keyword arguments used if we |         :param write_options: optional extra keyword arguments used if we | ||||||
|             have to create a new document. |             have to create a new document. | ||||||
|             Passes any write_options onto :meth:`~mongoengine.Document.save` |             Passes any write_options onto :meth:`~mongoengine.Document.save` | ||||||
|  |  | ||||||
|         .. versionadded:: 0.3 |         .. versionadded:: 0.3 | ||||||
|  |  | ||||||
|  |         :param auto_save: if the object is to be saved automatically if not found. | ||||||
|  |  | ||||||
|  |         .. versionadded:: 0.6 | ||||||
|         """ |         """ | ||||||
|         defaults = query.get('defaults', {}) |         defaults = query.get('defaults', {}) | ||||||
|         if 'defaults' in query: |         if 'defaults' in query: | ||||||
|             del query['defaults'] |             del query['defaults'] | ||||||
|  |  | ||||||
|         self.__call__(*q_objs, **query) |         try: | ||||||
|         count = self.count() |             doc = self.get(*q_objs, **query) | ||||||
|         if count == 0: |             return doc, False | ||||||
|  |         except self._document.DoesNotExist: | ||||||
|             query.update(defaults) |             query.update(defaults) | ||||||
|             doc = self._document(**query) |             doc = self._document(**query) | ||||||
|             doc.save(write_options=write_options) |  | ||||||
|  |             if auto_save: | ||||||
|  |                 doc.save(write_options=write_options) | ||||||
|             return doc, True |             return doc, True | ||||||
|         elif count == 1: |  | ||||||
|             return self.first(), False |  | ||||||
|         else: |  | ||||||
|             message = u'%d items returned, instead of 1' % count |  | ||||||
|             raise self._document.MultipleObjectsReturned(message) |  | ||||||
|  |  | ||||||
|     def create(self, **kwargs): |     def create(self, **kwargs): | ||||||
|         """Create new object. Returns the saved object instance. |         """Create new object. Returns the saved object instance. | ||||||
| @@ -783,11 +850,21 @@ class QuerySet(object): | |||||||
|             result = None |             result = None | ||||||
|         return result |         return result | ||||||
|  |  | ||||||
|     def insert(self, doc_or_docs, load_bulk=True): |     def insert(self, doc_or_docs, load_bulk=True, safe=False, write_options=None): | ||||||
|         """bulk insert documents |         """bulk insert documents | ||||||
|  |  | ||||||
|  |         If ``safe=True`` and the operation is unsuccessful, an | ||||||
|  |         :class:`~mongoengine.OperationError` will be raised. | ||||||
|  |  | ||||||
|         :param docs_or_doc: a document or list of documents to be inserted |         :param docs_or_doc: a document or list of documents to be inserted | ||||||
|         :param load_bulk (optional): If True returns the list of document instances |         :param load_bulk (optional): If True returns the list of document instances | ||||||
|  |         :param safe: check if the operation succeeded before returning | ||||||
|  |         :param write_options: Extra keyword arguments are passed down to | ||||||
|  |                 :meth:`~pymongo.collection.Collection.insert` | ||||||
|  |                 which will be used as options for the resultant ``getLastError`` command. | ||||||
|  |                 For example, ``insert(..., {w: 2, fsync: True})`` will wait until at least two | ||||||
|  |                 servers have recorded the write and will force an fsync on each server being | ||||||
|  |                 written to. | ||||||
|  |  | ||||||
|         By default returns document instances, set ``load_bulk`` to False to |         By default returns document instances, set ``load_bulk`` to False to | ||||||
|         return just ``ObjectIds`` |         return just ``ObjectIds`` | ||||||
| @@ -796,6 +873,10 @@ class QuerySet(object): | |||||||
|         """ |         """ | ||||||
|         from document import Document |         from document import Document | ||||||
|  |  | ||||||
|  |         if not write_options: | ||||||
|  |             write_options = {} | ||||||
|  |         write_options.update({'safe': safe}) | ||||||
|  |  | ||||||
|         docs = doc_or_docs |         docs = doc_or_docs | ||||||
|         return_one = False |         return_one = False | ||||||
|         if isinstance(docs, Document) or issubclass(docs.__class__, Document): |         if isinstance(docs, Document) or issubclass(docs.__class__, Document): | ||||||
| @@ -812,23 +893,39 @@ class QuerySet(object): | |||||||
|                 raise OperationError(msg) |                 raise OperationError(msg) | ||||||
|             raw.append(doc.to_mongo()) |             raw.append(doc.to_mongo()) | ||||||
|  |  | ||||||
|         ids = self._collection.insert(raw) |         signals.pre_bulk_insert.send(self._document, documents=docs) | ||||||
|  |         try: | ||||||
|  |             ids = self._collection.insert(raw, **write_options) | ||||||
|  |         except pymongo.errors.OperationFailure, err: | ||||||
|  |             message = 'Could not save document (%s)' | ||||||
|  |             if u'duplicate key' in unicode(err): | ||||||
|  |                 message = u'Tried to save duplicate unique keys (%s)' | ||||||
|  |             raise OperationError(message % unicode(err)) | ||||||
|  |  | ||||||
|         if not load_bulk: |         if not load_bulk: | ||||||
|  |             signals.post_bulk_insert.send( | ||||||
|  |                     self._document, documents=docs, loaded=False) | ||||||
|             return return_one and ids[0] or ids |             return return_one and ids[0] or ids | ||||||
|  |  | ||||||
|         documents = self.in_bulk(ids) |         documents = self.in_bulk(ids) | ||||||
|         results = [] |         results = [] | ||||||
|         for obj_id in ids: |         for obj_id in ids: | ||||||
|             results.append(documents.get(obj_id)) |             results.append(documents.get(obj_id)) | ||||||
|  |         signals.post_bulk_insert.send( | ||||||
|  |                 self._document, documents=results, loaded=True) | ||||||
|         return return_one and results[0] or results |         return return_one and results[0] or results | ||||||
|  |  | ||||||
|     def with_id(self, object_id): |     def with_id(self, object_id): | ||||||
|         """Retrieve the object matching the id provided. |         """Retrieve the object matching the id provided.  Uses `object_id` only | ||||||
|  |         and raises InvalidQueryError if a filter has been applied. | ||||||
|  |  | ||||||
|         :param object_id: the value for the id of the document to look up |         :param object_id: the value for the id of the document to look up | ||||||
|  |  | ||||||
|  |         .. versionchanged:: 0.6 Raises InvalidQueryError if filter has been set | ||||||
|         """ |         """ | ||||||
|         return self._document.objects(pk=object_id).first() |         if not self._query_obj.empty: | ||||||
|  |             raise InvalidQueryError("Cannot use a filter whilst using `with_id`") | ||||||
|  |         return self.filter(pk=object_id).first() | ||||||
|  |  | ||||||
|     def in_bulk(self, object_ids): |     def in_bulk(self, object_ids): | ||||||
|         """Retrieve a set of documents by their ids. |         """Retrieve a set of documents by their ids. | ||||||
| @@ -843,8 +940,13 @@ class QuerySet(object): | |||||||
|  |  | ||||||
|         docs = self._collection.find({'_id': {'$in': object_ids}}, |         docs = self._collection.find({'_id': {'$in': object_ids}}, | ||||||
|                                      **self._cursor_args) |                                      **self._cursor_args) | ||||||
|         for doc in docs: |         if self._scalar: | ||||||
|             doc_map[doc['_id']] = self._document._from_son(doc) |             for doc in docs: | ||||||
|  |                 doc_map[doc['_id']] = self._get_scalar( | ||||||
|  |                         self._document._from_son(doc)) | ||||||
|  |         else: | ||||||
|  |             for doc in docs: | ||||||
|  |                 doc_map[doc['_id']] = self._document._from_son(doc) | ||||||
|  |  | ||||||
|         return doc_map |         return doc_map | ||||||
|  |  | ||||||
| @@ -854,6 +956,9 @@ class QuerySet(object): | |||||||
|         try: |         try: | ||||||
|             if self._limit == 0: |             if self._limit == 0: | ||||||
|                 raise StopIteration |                 raise StopIteration | ||||||
|  |             if self._scalar: | ||||||
|  |                 return self._get_scalar(self._document._from_son( | ||||||
|  |                         self._cursor.next())) | ||||||
|             return self._document._from_son(self._cursor.next()) |             return self._document._from_son(self._cursor.next()) | ||||||
|         except StopIteration, e: |         except StopIteration, e: | ||||||
|             self.rewind() |             self.rewind() | ||||||
| @@ -887,9 +992,9 @@ class QuerySet(object): | |||||||
|         and :meth:`~mongoengine.tests.QuerySetTest.test_map_advanced` |         and :meth:`~mongoengine.tests.QuerySetTest.test_map_advanced` | ||||||
|         tests in ``tests.queryset.QuerySetTest`` for usage examples. |         tests in ``tests.queryset.QuerySetTest`` for usage examples. | ||||||
|  |  | ||||||
|         :param map_f: map function, as :class:`~pymongo.code.Code` or string |         :param map_f: map function, as :class:`~bson.code.Code` or string | ||||||
|         :param reduce_f: reduce function, as |         :param reduce_f: reduce function, as | ||||||
|                          :class:`~pymongo.code.Code` or string |                          :class:`~bson.code.Code` or string | ||||||
|         :param output: output collection name, if set to 'inline' will try to |         :param output: output collection name, if set to 'inline' will try to | ||||||
|                        use :class:`~pymongo.collection.Collection.inline_map_reduce` |                        use :class:`~pymongo.collection.Collection.inline_map_reduce` | ||||||
|         :param finalize_f: finalize function, an optional function that |         :param finalize_f: finalize function, an optional function that | ||||||
| @@ -919,27 +1024,27 @@ class QuerySet(object): | |||||||
|             raise NotImplementedError("Requires MongoDB >= 1.7.1") |             raise NotImplementedError("Requires MongoDB >= 1.7.1") | ||||||
|  |  | ||||||
|         map_f_scope = {} |         map_f_scope = {} | ||||||
|         if isinstance(map_f, pymongo.code.Code): |         if isinstance(map_f, Code): | ||||||
|             map_f_scope = map_f.scope |             map_f_scope = map_f.scope | ||||||
|             map_f = unicode(map_f) |             map_f = unicode(map_f) | ||||||
|         map_f = pymongo.code.Code(self._sub_js_fields(map_f), map_f_scope) |         map_f = Code(self._sub_js_fields(map_f), map_f_scope) | ||||||
|  |  | ||||||
|         reduce_f_scope = {} |         reduce_f_scope = {} | ||||||
|         if isinstance(reduce_f, pymongo.code.Code): |         if isinstance(reduce_f, Code): | ||||||
|             reduce_f_scope = reduce_f.scope |             reduce_f_scope = reduce_f.scope | ||||||
|             reduce_f = unicode(reduce_f) |             reduce_f = unicode(reduce_f) | ||||||
|         reduce_f_code = self._sub_js_fields(reduce_f) |         reduce_f_code = self._sub_js_fields(reduce_f) | ||||||
|         reduce_f = pymongo.code.Code(reduce_f_code, reduce_f_scope) |         reduce_f = Code(reduce_f_code, reduce_f_scope) | ||||||
|  |  | ||||||
|         mr_args = {'query': self._query} |         mr_args = {'query': self._query} | ||||||
|  |  | ||||||
|         if finalize_f: |         if finalize_f: | ||||||
|             finalize_f_scope = {} |             finalize_f_scope = {} | ||||||
|             if isinstance(finalize_f, pymongo.code.Code): |             if isinstance(finalize_f, Code): | ||||||
|                 finalize_f_scope = finalize_f.scope |                 finalize_f_scope = finalize_f.scope | ||||||
|                 finalize_f = unicode(finalize_f) |                 finalize_f = unicode(finalize_f) | ||||||
|             finalize_f_code = self._sub_js_fields(finalize_f) |             finalize_f_code = self._sub_js_fields(finalize_f) | ||||||
|             finalize_f = pymongo.code.Code(finalize_f_code, finalize_f_scope) |             finalize_f = Code(finalize_f_code, finalize_f_scope) | ||||||
|             mr_args['finalize'] = finalize_f |             mr_args['finalize'] = finalize_f | ||||||
|  |  | ||||||
|         if scope: |         if scope: | ||||||
| @@ -1030,6 +1135,9 @@ class QuerySet(object): | |||||||
|             return self |             return self | ||||||
|         # Integer index provided |         # Integer index provided | ||||||
|         elif isinstance(key, int): |         elif isinstance(key, int): | ||||||
|  |             if self._scalar: | ||||||
|  |                 return self._get_scalar(self._document._from_son( | ||||||
|  |                         self._cursor[key])) | ||||||
|             return self._document._from_son(self._cursor[key]) |             return self._document._from_son(self._cursor[key]) | ||||||
|         raise AttributeError |         raise AttributeError | ||||||
|  |  | ||||||
| @@ -1039,8 +1147,10 @@ class QuerySet(object): | |||||||
|         :param field: the field to select distinct values from |         :param field: the field to select distinct values from | ||||||
|  |  | ||||||
|         .. versionadded:: 0.4 |         .. versionadded:: 0.4 | ||||||
|  |         .. versionchanged:: 0.5 - Fixed handling references | ||||||
|         """ |         """ | ||||||
|         return self._cursor.distinct(field) |         from dereference import DeReference | ||||||
|  |         return DeReference()(self._cursor.distinct(field), 1) | ||||||
|  |  | ||||||
|     def only(self, *fields): |     def only(self, *fields): | ||||||
|         """Load only a subset of this document's fields. :: |         """Load only a subset of this document's fields. :: | ||||||
| @@ -1209,11 +1319,17 @@ class QuerySet(object): | |||||||
|             document_cls, field_name = rule_entry |             document_cls, field_name = rule_entry | ||||||
|             rule = doc._meta['delete_rules'][rule_entry] |             rule = doc._meta['delete_rules'][rule_entry] | ||||||
|             if rule == CASCADE: |             if rule == CASCADE: | ||||||
|                 document_cls.objects(**{field_name + '__in': self}).delete(safe=safe) |                 ref_q = document_cls.objects(**{field_name + '__in': self}) | ||||||
|  |                 if doc != document_cls or (doc == document_cls and ref_q.count() > 0): | ||||||
|  |                     ref_q.delete(safe=safe) | ||||||
|             elif rule == NULLIFY: |             elif rule == NULLIFY: | ||||||
|                 document_cls.objects(**{field_name + '__in': self}).update( |                 document_cls.objects(**{field_name + '__in': self}).update( | ||||||
|                         safe_update=safe, |                         safe_update=safe, | ||||||
|                         **{'unset__%s' % field_name: 1}) |                         **{'unset__%s' % field_name: 1}) | ||||||
|  |             elif rule == PULL: | ||||||
|  |                 document_cls.objects(**{field_name + '__in': self}).update( | ||||||
|  |                         safe_update=safe, | ||||||
|  |                         **{'pull_all__%s' % field_name: self}) | ||||||
|  |  | ||||||
|         self._collection.remove(self._query, safe=safe) |         self._collection.remove(self._query, safe=safe) | ||||||
|  |  | ||||||
| @@ -1226,6 +1342,9 @@ class QuerySet(object): | |||||||
|  |  | ||||||
|         mongo_update = {} |         mongo_update = {} | ||||||
|         for key, value in update.items(): |         for key, value in update.items(): | ||||||
|  |             if key == "__raw__": | ||||||
|  |                 mongo_update.update(value) | ||||||
|  |                 continue | ||||||
|             parts = key.split('__') |             parts = key.split('__') | ||||||
|             # Check for an operator and transform to mongo-style if there is |             # Check for an operator and transform to mongo-style if there is | ||||||
|             op = None |             op = None | ||||||
| @@ -1249,8 +1368,8 @@ class QuerySet(object): | |||||||
|                 parts = [] |                 parts = [] | ||||||
|  |  | ||||||
|                 cleaned_fields = [] |                 cleaned_fields = [] | ||||||
|                 append_field = True |  | ||||||
|                 for field in fields: |                 for field in fields: | ||||||
|  |                     append_field = True | ||||||
|                     if isinstance(field, str): |                     if isinstance(field, str): | ||||||
|                         # Convert the S operator to $ |                         # Convert the S operator to $ | ||||||
|                         if field == 'S': |                         if field == 'S': | ||||||
| @@ -1266,17 +1385,30 @@ class QuerySet(object): | |||||||
|                 field = cleaned_fields[-1] |                 field = cleaned_fields[-1] | ||||||
|  |  | ||||||
|                 if op in (None, 'set', 'push', 'pull', 'addToSet'): |                 if op in (None, 'set', 'push', 'pull', 'addToSet'): | ||||||
|                     value = field.prepare_query_value(op, value) |                     if field.required or value is not None: | ||||||
|  |                         value = field.prepare_query_value(op, value) | ||||||
|                 elif op in ('pushAll', 'pullAll'): |                 elif op in ('pushAll', 'pullAll'): | ||||||
|                     value = [field.prepare_query_value(op, v) for v in value] |                     value = [field.prepare_query_value(op, v) for v in value] | ||||||
|  |  | ||||||
|             key = '.'.join(parts) |             key = '.'.join(parts) | ||||||
|  |  | ||||||
|             if op: |             if not op: | ||||||
|                 value = {key: value} |                 raise InvalidQueryError("Updates must supply an operation eg: set__FIELD=value") | ||||||
|                 key = '$' + op |  | ||||||
|  |  | ||||||
|             if op is None or key not in mongo_update: |             if 'pull' in op and '.' in key: | ||||||
|  |                 # Dot operators don't work on pull operations | ||||||
|  |                 # it uses nested dict syntax | ||||||
|  |                 if op == 'pullAll': | ||||||
|  |                     raise InvalidQueryError("pullAll operations only support a single field depth") | ||||||
|  |  | ||||||
|  |                 parts.reverse() | ||||||
|  |                 for key in parts: | ||||||
|  |                     value = {key: value} | ||||||
|  |             else: | ||||||
|  |                 value = {key: value} | ||||||
|  |             key = '$' + op | ||||||
|  |  | ||||||
|  |             if key not in mongo_update: | ||||||
|                 mongo_update[key] = value |                 mongo_update[key] = value | ||||||
|             elif key in mongo_update and isinstance(mongo_update[key], dict): |             elif key in mongo_update and isinstance(mongo_update[key], dict): | ||||||
|                 mongo_update[key].update(value) |                 mongo_update[key].update(value) | ||||||
| @@ -1300,8 +1432,15 @@ class QuerySet(object): | |||||||
|             write_options = {} |             write_options = {} | ||||||
|  |  | ||||||
|         update = QuerySet._transform_update(self._document, **update) |         update = QuerySet._transform_update(self._document, **update) | ||||||
|  |         query = self._query | ||||||
|  |  | ||||||
|  |         # SERVER-5247 hack | ||||||
|  |         remove_types = "_types" in query and ".$." in unicode(update) | ||||||
|  |         if remove_types: | ||||||
|  |             del query["_types"] | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             ret = self._collection.update(self._query, update, multi=multi, |             ret = self._collection.update(query, update, multi=multi, | ||||||
|                                           upsert=upsert, safe=safe_update, |                                           upsert=upsert, safe=safe_update, | ||||||
|                                           **write_options) |                                           **write_options) | ||||||
|             if ret is not None and 'n' in ret: |             if ret is not None and 'n' in ret: | ||||||
| @@ -1329,10 +1468,17 @@ class QuerySet(object): | |||||||
|         if not write_options: |         if not write_options: | ||||||
|             write_options = {} |             write_options = {} | ||||||
|         update = QuerySet._transform_update(self._document, **update) |         update = QuerySet._transform_update(self._document, **update) | ||||||
|  |         query = self._query | ||||||
|  |  | ||||||
|  |         # SERVER-5247 hack | ||||||
|  |         remove_types = "_types" in query and ".$." in unicode(update) | ||||||
|  |         if remove_types: | ||||||
|  |             del query["_types"] | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             # Explicitly provide 'multi=False' to newer versions of PyMongo |             # Explicitly provide 'multi=False' to newer versions of PyMongo | ||||||
|             # as the default may change to 'True' |             # as the default may change to 'True' | ||||||
|             ret = self._collection.update(self._query, update, multi=False, |             ret = self._collection.update(query, update, multi=False, | ||||||
|                                           upsert=upsert, safe=safe_update, |                                           upsert=upsert, safe=safe_update, | ||||||
|                                            **write_options) |                                            **write_options) | ||||||
|  |  | ||||||
| @@ -1342,8 +1488,47 @@ class QuerySet(object): | |||||||
|             raise OperationError(u'Update failed [%s]' % unicode(e)) |             raise OperationError(u'Update failed [%s]' % unicode(e)) | ||||||
|  |  | ||||||
|     def __iter__(self): |     def __iter__(self): | ||||||
|  |         self.rewind() | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
|  |     def _get_scalar(self, doc): | ||||||
|  |  | ||||||
|  |         def lookup(obj, name): | ||||||
|  |             chunks = name.split('__') | ||||||
|  |             for chunk in chunks: | ||||||
|  |                 if hasattr(obj, '_db_field_map'): | ||||||
|  |                     chunk = obj._db_field_map.get(chunk, chunk) | ||||||
|  |                 obj = getattr(obj, chunk) | ||||||
|  |             return obj | ||||||
|  |  | ||||||
|  |         data = [lookup(doc, n) for n in self._scalar] | ||||||
|  |         if len(data) == 1: | ||||||
|  |             return data[0] | ||||||
|  |  | ||||||
|  |         return tuple(data) | ||||||
|  |  | ||||||
|  |     def scalar(self, *fields): | ||||||
|  |         """Instead of returning Document instances, return either a specific | ||||||
|  |         value or a tuple of values in order. | ||||||
|  |  | ||||||
|  |         This effects all results and can be unset by calling ``scalar`` | ||||||
|  |         without arguments. Calls ``only`` automatically. | ||||||
|  |  | ||||||
|  |         :param fields: One or more fields to return instead of a Document. | ||||||
|  |         """ | ||||||
|  |         self._scalar = list(fields) | ||||||
|  |  | ||||||
|  |         if fields: | ||||||
|  |             self.only(*fields) | ||||||
|  |         else: | ||||||
|  |             self.all_fields() | ||||||
|  |  | ||||||
|  |         return self | ||||||
|  |  | ||||||
|  |     def values_list(self, *fields): | ||||||
|  |         """An alias for scalar""" | ||||||
|  |         return self.scalar(*fields) | ||||||
|  |  | ||||||
|     def _sub_js_fields(self, code): |     def _sub_js_fields(self, code): | ||||||
|         """When fields are specified with [~fieldname] syntax, where |         """When fields are specified with [~fieldname] syntax, where | ||||||
|         *fieldname* is the Python name of a field, *fieldname* will be |         *fieldname* is the Python name of a field, *fieldname* will be | ||||||
| @@ -1406,9 +1591,9 @@ class QuerySet(object): | |||||||
|             query['$where'] = self._where_clause |             query['$where'] = self._where_clause | ||||||
|  |  | ||||||
|         scope['query'] = query |         scope['query'] = query | ||||||
|         code = pymongo.code.Code(code, scope=scope) |         code = Code(code, scope=scope) | ||||||
|  |  | ||||||
|         db = _get_db() |         db = self._document._get_db() | ||||||
|         return db.eval(code, *fields) |         return db.eval(code, *fields) | ||||||
|  |  | ||||||
|     def where(self, where_clause): |     def where(self, where_clause): | ||||||
| @@ -1435,13 +1620,13 @@ class QuerySet(object): | |||||||
|         .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work |         .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work | ||||||
|             with sharding. |             with sharding. | ||||||
|         """ |         """ | ||||||
|         map_func = pymongo.code.Code(""" |         map_func = Code(""" | ||||||
|             function() { |             function() { | ||||||
|                 emit(1, this[field] || 0); |                 emit(1, this[field] || 0); | ||||||
|             } |             } | ||||||
|         """, scope={'field': field}) |         """, scope={'field': field}) | ||||||
|  |  | ||||||
|         reduce_func = pymongo.code.Code(""" |         reduce_func = Code(""" | ||||||
|             function(key, values) { |             function(key, values) { | ||||||
|                 var sum = 0; |                 var sum = 0; | ||||||
|                 for (var i in values) { |                 for (var i in values) { | ||||||
| @@ -1465,14 +1650,14 @@ class QuerySet(object): | |||||||
|         .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work |         .. versionchanged:: 0.5 - updated to map_reduce as db.eval doesnt work | ||||||
|             with sharding. |             with sharding. | ||||||
|         """ |         """ | ||||||
|         map_func = pymongo.code.Code(""" |         map_func = Code(""" | ||||||
|             function() { |             function() { | ||||||
|                 if (this.hasOwnProperty(field)) |                 if (this.hasOwnProperty(field)) | ||||||
|                     emit(1, {t: this[field] || 0, c: 1}); |                     emit(1, {t: this[field] || 0, c: 1}); | ||||||
|             } |             } | ||||||
|         """, scope={'field': field}) |         """, scope={'field': field}) | ||||||
|  |  | ||||||
|         reduce_func = pymongo.code.Code(""" |         reduce_func = Code(""" | ||||||
|             function(key, values) { |             function(key, values) { | ||||||
|                 var out = {t: 0, c: 0}; |                 var out = {t: 0, c: 0}; | ||||||
|                 for (var i in values) { |                 for (var i in values) { | ||||||
| @@ -1484,7 +1669,7 @@ class QuerySet(object): | |||||||
|             } |             } | ||||||
|         """) |         """) | ||||||
|  |  | ||||||
|         finalize_func = pymongo.code.Code(""" |         finalize_func = Code(""" | ||||||
|             function(key, value) { |             function(key, value) { | ||||||
|                 return value.t / value.c; |                 return value.t / value.c; | ||||||
|             } |             } | ||||||
| @@ -1526,13 +1711,20 @@ class QuerySet(object): | |||||||
|             function() { |             function() { | ||||||
|                 path = '{{~%(field)s}}'.split('.'); |                 path = '{{~%(field)s}}'.split('.'); | ||||||
|                 field = this; |                 field = this; | ||||||
|                 for (p in path) { field = field[path[p]]; } |                 for (p in path) { | ||||||
|  |                     if (field) | ||||||
|  |                        field = field[path[p]]; | ||||||
|  |                     else | ||||||
|  |                        break; | ||||||
|  |                 } | ||||||
|                 if (field && field.constructor == Array) { |                 if (field && field.constructor == Array) { | ||||||
|                     field.forEach(function(item) { |                     field.forEach(function(item) { | ||||||
|                         emit(item, 1); |                         emit(item, 1); | ||||||
|                     }); |                     }); | ||||||
|                 } else { |                 } else if (field) { | ||||||
|                     emit(field, 1); |                     emit(field, 1); | ||||||
|  |                 } else { | ||||||
|  |                     emit(null, 1); | ||||||
|                 } |                 } | ||||||
|             } |             } | ||||||
|         """ % dict(field=field) |         """ % dict(field=field) | ||||||
| @@ -1572,7 +1764,12 @@ class QuerySet(object): | |||||||
|                     var total = 0.0; |                     var total = 0.0; | ||||||
|                     db[collection].find(query).forEach(function(doc) { |                     db[collection].find(query).forEach(function(doc) { | ||||||
|                         field = doc; |                         field = doc; | ||||||
|                         for (p in path) { field = field[path[p]]; } |                         for (p in path) { | ||||||
|  |                             if (field) | ||||||
|  |                                 field = field[path[p]]; | ||||||
|  |                             else | ||||||
|  |                                 break; | ||||||
|  |                         } | ||||||
|                         if (field && field.constructor == Array) { |                         if (field && field.constructor == Array) { | ||||||
|                             total += field.length; |                             total += field.length; | ||||||
|                         } else { |                         } else { | ||||||
| @@ -1588,7 +1785,12 @@ class QuerySet(object): | |||||||
|                 } |                 } | ||||||
|                 db[collection].find(query).forEach(function(doc) { |                 db[collection].find(query).forEach(function(doc) { | ||||||
|                     field = doc; |                     field = doc; | ||||||
|                     for (p in path) { field = field[path[p]]; } |                     for (p in path) { | ||||||
|  |                         if (field) | ||||||
|  |                             field = field[path[p]]; | ||||||
|  |                         else | ||||||
|  |                             break; | ||||||
|  |                     } | ||||||
|                     if (field && field.constructor == Array) { |                     if (field && field.constructor == Array) { | ||||||
|                         field.forEach(function(item) { |                         field.forEach(function(item) { | ||||||
|                             frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); |                             frequencies[item] = inc + (isNaN(frequencies[item]) ? 0: frequencies[item]); | ||||||
| @@ -1609,10 +1811,16 @@ class QuerySet(object): | |||||||
|  |  | ||||||
|     def __repr__(self): |     def __repr__(self): | ||||||
|         limit = REPR_OUTPUT_SIZE + 1 |         limit = REPR_OUTPUT_SIZE + 1 | ||||||
|         if self._limit is not None and self._limit < limit: |         start = (0 if self._skip is None else self._skip) | ||||||
|             limit = self._limit |         if self._limit is None: | ||||||
|  |             stop = start + limit | ||||||
|  |         if self._limit is not None: | ||||||
|  |             if self._limit - start > limit: | ||||||
|  |                 stop = start + limit | ||||||
|  |             else: | ||||||
|  |                 stop = self._limit | ||||||
|         try: |         try: | ||||||
|             data = list(self[self._skip:limit]) |             data = list(self[start:stop]) | ||||||
|         except pymongo.errors.InvalidOperation: |         except pymongo.errors.InvalidOperation: | ||||||
|             return ".. queryset mid-iteration .." |             return ".. queryset mid-iteration .." | ||||||
|         if len(data) > REPR_OUTPUT_SIZE: |         if len(data) > REPR_OUTPUT_SIZE: | ||||||
| @@ -1620,13 +1828,15 @@ class QuerySet(object): | |||||||
|         return repr(data) |         return repr(data) | ||||||
|  |  | ||||||
|     def select_related(self, max_depth=1): |     def select_related(self, max_depth=1): | ||||||
|         """Handles dereferencing of :class:`~pymongo.dbref.DBRef` objects to |         """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to | ||||||
|         a maximum depth in order to cut down the number queries to mongodb. |         a maximum depth in order to cut down the number queries to mongodb. | ||||||
|  |  | ||||||
|         .. versionadded:: 0.5 |         .. versionadded:: 0.5 | ||||||
|         """ |         """ | ||||||
|         from dereference import dereference |         from dereference import DeReference | ||||||
|         return dereference(self, max_depth=max_depth) |         # Make select related work the same for querysets | ||||||
|  |         max_depth += 1 | ||||||
|  |         return DeReference()(self, max_depth=max_depth) | ||||||
|  |  | ||||||
|  |  | ||||||
| class QuerySetManager(object): | class QuerySetManager(object): | ||||||
|   | |||||||
| @@ -42,3 +42,5 @@ pre_save = _signals.signal('pre_save') | |||||||
| post_save = _signals.signal('post_save') | post_save = _signals.signal('post_save') | ||||||
| pre_delete = _signals.signal('pre_delete') | pre_delete = _signals.signal('pre_delete') | ||||||
| post_delete = _signals.signal('post_delete') | post_delete = _signals.signal('post_delete') | ||||||
|  | pre_bulk_insert = _signals.signal('pre_bulk_insert') | ||||||
|  | post_bulk_insert = _signals.signal('post_bulk_insert') | ||||||
|   | |||||||
| @@ -1,4 +1,4 @@ | |||||||
| from mongoengine.connection import _get_db | from mongoengine.connection import get_db | ||||||
|  |  | ||||||
|  |  | ||||||
| class query_counter(object): | class query_counter(object): | ||||||
| @@ -7,7 +7,7 @@ class query_counter(object): | |||||||
|     def __init__(self): |     def __init__(self): | ||||||
|         """ Construct the query_counter. """ |         """ Construct the query_counter. """ | ||||||
|         self.counter = 0 |         self.counter = 0 | ||||||
|         self.db = _get_db() |         self.db = get_db() | ||||||
|  |  | ||||||
|     def __enter__(self): |     def __enter__(self): | ||||||
|         """ On every with block we need to drop the profile collection. """ |         """ On every with block we need to drop the profile collection. """ | ||||||
|   | |||||||
							
								
								
									
										54
									
								
								python-mongoengine.spec
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										54
									
								
								python-mongoengine.spec
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,54 @@ | |||||||
|  | # sitelib for noarch packages, sitearch for others (remove the unneeded one) | ||||||
|  | %{!?python_sitelib: %global python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")} | ||||||
|  | %{!?python_sitearch: %global python_sitearch %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")} | ||||||
|  |  | ||||||
|  | %define srcname mongoengine | ||||||
|  |  | ||||||
|  | Name:           python-%{srcname} | ||||||
|  | Version:        0.6.11 | ||||||
|  | Release:        1%{?dist} | ||||||
|  | Summary:        A Python Document-Object Mapper for working with MongoDB | ||||||
|  |  | ||||||
|  | Group:          Development/Libraries | ||||||
|  | License:        MIT | ||||||
|  | URL:            https://github.com/MongoEngine/mongoengine | ||||||
|  | Source0:        %{srcname}-%{version}.tar.bz2 | ||||||
|  |  | ||||||
|  | BuildRequires:  python-devel | ||||||
|  | BuildRequires:  python-setuptools | ||||||
|  |  | ||||||
|  | Requires:       mongodb | ||||||
|  | Requires:       pymongo | ||||||
|  | Requires:       python-blinker | ||||||
|  | Requires:       python-imaging | ||||||
|  |  | ||||||
|  |  | ||||||
|  | %description | ||||||
|  | MongoEngine is an ORM-like layer on top of PyMongo. | ||||||
|  |  | ||||||
|  | %prep | ||||||
|  | %setup -q -n %{srcname}-%{version} | ||||||
|  |  | ||||||
|  |  | ||||||
|  | %build | ||||||
|  | # Remove CFLAGS=... for noarch packages (unneeded) | ||||||
|  | CFLAGS="$RPM_OPT_FLAGS" %{__python} setup.py build | ||||||
|  |  | ||||||
|  |  | ||||||
|  | %install | ||||||
|  | rm -rf $RPM_BUILD_ROOT | ||||||
|  | %{__python} setup.py install -O1 --skip-build --root $RPM_BUILD_ROOT | ||||||
|  |  | ||||||
|  | %clean | ||||||
|  | rm -rf $RPM_BUILD_ROOT | ||||||
|  |  | ||||||
|  | %files | ||||||
|  | %defattr(-,root,root,-) | ||||||
|  | %doc docs AUTHORS LICENSE README.rst | ||||||
|  | # For noarch packages: sitelib | ||||||
|  |  %{python_sitelib}/* | ||||||
|  | # For arch-specific packages: sitearch | ||||||
|  | # %{python_sitearch}/* | ||||||
|  |  | ||||||
|  | %changelog | ||||||
|  | * See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html | ||||||
							
								
								
									
										1
									
								
								requirements.txt
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1
									
								
								requirements.txt
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1 @@ | |||||||
|  | pymongo | ||||||
							
								
								
									
										13
									
								
								setup.cfg
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										13
									
								
								setup.cfg
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,13 @@ | |||||||
|  | [aliases] | ||||||
|  | test = nosetests | ||||||
|  |  | ||||||
|  | [nosetests] | ||||||
|  | verbosity = 2 | ||||||
|  | detailed-errors = 1 | ||||||
|  | #with-coverage = 1 | ||||||
|  | cover-html = 1 | ||||||
|  | cover-html-dir = ../htmlcov | ||||||
|  | cover-package = mongoengine | ||||||
|  | cover-erase = 1 | ||||||
|  | where = tests | ||||||
|  | #tests = test_bugfix.py | ||||||
							
								
								
									
										7
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										7
									
								
								setup.py
									
									
									
									
									
								
							| @@ -38,7 +38,9 @@ setup(name='mongoengine', | |||||||
|       packages=find_packages(), |       packages=find_packages(), | ||||||
|       author='Harry Marr', |       author='Harry Marr', | ||||||
|       author_email='harry.marr@{nospam}gmail.com', |       author_email='harry.marr@{nospam}gmail.com', | ||||||
|       url='http://hmarr.com/mongoengine/', |       maintainer="Ross Lawley", | ||||||
|  |       maintainer_email="ross.lawley@{nospam}gmail.com", | ||||||
|  |       url='http://mongoengine.org/', | ||||||
|       license='MIT', |       license='MIT', | ||||||
|       include_package_data=True, |       include_package_data=True, | ||||||
|       description=DESCRIPTION, |       description=DESCRIPTION, | ||||||
| @@ -46,6 +48,5 @@ setup(name='mongoengine', | |||||||
|       platforms=['any'], |       platforms=['any'], | ||||||
|       classifiers=CLASSIFIERS, |       classifiers=CLASSIFIERS, | ||||||
|       install_requires=['pymongo'], |       install_requires=['pymongo'], | ||||||
|       test_suite='tests', |       tests_require=['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL'] | ||||||
|       tests_require=['blinker', 'django==1.3'] |  | ||||||
| ) | ) | ||||||
|   | |||||||
| @@ -1,9 +1,6 @@ | |||||||
| from datetime import datetime | from datetime import datetime | ||||||
| import pymongo |  | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.base import BaseField |  | ||||||
| from mongoengine.connection import _get_db |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class PickleEmbedded(EmbeddedDocument): | class PickleEmbedded(EmbeddedDocument): | ||||||
| @@ -15,6 +12,7 @@ class PickleTest(Document): | |||||||
|     string = StringField(choices=(('One', '1'), ('Two', '2'))) |     string = StringField(choices=(('One', '1'), ('Two', '2'))) | ||||||
|     embedded = EmbeddedDocumentField(PickleEmbedded) |     embedded = EmbeddedDocumentField(PickleEmbedded) | ||||||
|     lists = ListField(StringField()) |     lists = ListField(StringField()) | ||||||
|  |     photo = FileField() | ||||||
|  |  | ||||||
|  |  | ||||||
| class Mixin(object): | class Mixin(object): | ||||||
| @@ -22,4 +20,4 @@ class Mixin(object): | |||||||
|  |  | ||||||
|  |  | ||||||
| class Base(Document): | class Base(Document): | ||||||
|     pass |     meta = {'allow_inheritance': True} | ||||||
|   | |||||||
							
								
								
									
										
											BIN
										
									
								
								tests/mongoengine.png
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tests/mongoengine.png
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 8.1 KiB | 
							
								
								
									
										98
									
								
								tests/test_connection.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										98
									
								
								tests/test_connection.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,98 @@ | |||||||
|  | import datetime | ||||||
|  | import pymongo | ||||||
|  | import unittest | ||||||
|  |  | ||||||
|  | import mongoengine.connection | ||||||
|  |  | ||||||
|  | from bson.tz_util import utc | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.connection import get_db, get_connection, ConnectionError | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ConnectionTest(unittest.TestCase): | ||||||
|  |  | ||||||
|  |     def tearDown(self): | ||||||
|  |         mongoengine.connection._connection_settings = {} | ||||||
|  |         mongoengine.connection._connections = {} | ||||||
|  |         mongoengine.connection._dbs = {} | ||||||
|  |  | ||||||
|  |     def test_connect(self): | ||||||
|  |         """Ensure that the connect() method works properly. | ||||||
|  |         """ | ||||||
|  |         connect('mongoenginetest') | ||||||
|  |  | ||||||
|  |         conn = get_connection() | ||||||
|  |         self.assertTrue(isinstance(conn, pymongo.connection.Connection)) | ||||||
|  |  | ||||||
|  |         db = get_db() | ||||||
|  |         self.assertTrue(isinstance(db, pymongo.database.Database)) | ||||||
|  |         self.assertEqual(db.name, 'mongoenginetest') | ||||||
|  |  | ||||||
|  |         connect('mongoenginetest2', alias='testdb') | ||||||
|  |         conn = get_connection('testdb') | ||||||
|  |         self.assertTrue(isinstance(conn, pymongo.connection.Connection)) | ||||||
|  |  | ||||||
|  |     def test_connect_uri(self): | ||||||
|  |         """Ensure that the connect() method works properly with uri's | ||||||
|  |         """ | ||||||
|  |         c = connect(db='mongoenginetest', alias='admin') | ||||||
|  |         c.admin.system.users.remove({}) | ||||||
|  |         c.mongoenginetest.system.users.remove({}) | ||||||
|  |  | ||||||
|  |         c.admin.add_user("admin", "password") | ||||||
|  |         c.admin.authenticate("admin", "password") | ||||||
|  |         c.mongoenginetest.add_user("username", "password") | ||||||
|  |  | ||||||
|  |         self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost') | ||||||
|  |  | ||||||
|  |         connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest') | ||||||
|  |  | ||||||
|  |         conn = get_connection() | ||||||
|  |         self.assertTrue(isinstance(conn, pymongo.connection.Connection)) | ||||||
|  |  | ||||||
|  |         db = get_db() | ||||||
|  |         self.assertTrue(isinstance(db, pymongo.database.Database)) | ||||||
|  |         self.assertEqual(db.name, 'mongoenginetest') | ||||||
|  |  | ||||||
|  |     def test_register_connection(self): | ||||||
|  |         """Ensure that connections with different aliases may be registered. | ||||||
|  |         """ | ||||||
|  |         register_connection('testdb', 'mongoenginetest2') | ||||||
|  |  | ||||||
|  |         self.assertRaises(ConnectionError, get_connection) | ||||||
|  |         conn = get_connection('testdb') | ||||||
|  |         self.assertTrue(isinstance(conn, pymongo.connection.Connection)) | ||||||
|  |  | ||||||
|  |         db = get_db('testdb') | ||||||
|  |         self.assertTrue(isinstance(db, pymongo.database.Database)) | ||||||
|  |         self.assertEqual(db.name, 'mongoenginetest2') | ||||||
|  |  | ||||||
|  |     def test_connection_kwargs(self): | ||||||
|  |         """Ensure that connection kwargs get passed to pymongo. | ||||||
|  |         """ | ||||||
|  |         connect('mongoenginetest', alias='t1', tz_aware=True) | ||||||
|  |         conn = get_connection('t1') | ||||||
|  |  | ||||||
|  |         self.assertTrue(conn.tz_aware) | ||||||
|  |  | ||||||
|  |         connect('mongoenginetest2', alias='t2') | ||||||
|  |         conn = get_connection('t2') | ||||||
|  |         self.assertFalse(conn.tz_aware) | ||||||
|  |  | ||||||
|  |     def test_datetime(self): | ||||||
|  |         connect('mongoenginetest', tz_aware=True) | ||||||
|  |         d = datetime.datetime(2010, 5, 5, tzinfo=utc) | ||||||
|  |  | ||||||
|  |         class DateDoc(Document): | ||||||
|  |             the_date = DateTimeField(required=True) | ||||||
|  |  | ||||||
|  |         DateDoc.drop_collection() | ||||||
|  |         DateDoc(the_date=d).save() | ||||||
|  |  | ||||||
|  |         date_doc = DateDoc.objects.first() | ||||||
|  |         self.assertEqual(d, date_doc.the_date) | ||||||
|  |  | ||||||
|  |  | ||||||
|  | if __name__ == '__main__': | ||||||
|  |     unittest.main() | ||||||
| @@ -1,7 +1,7 @@ | |||||||
| import unittest | import unittest | ||||||
| 
 | 
 | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.connection import _get_db | from mongoengine.connection import get_db | ||||||
| from mongoengine.tests import query_counter | from mongoengine.tests import query_counter | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @@ -9,7 +9,7 @@ class FieldTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
|         connect(db='mongoenginetest') |         connect(db='mongoenginetest') | ||||||
|         self.db = _get_db() |         self.db = get_db() | ||||||
| 
 | 
 | ||||||
|     def test_list_item_dereference(self): |     def test_list_item_dereference(self): | ||||||
|         """Ensure that DBRef items in ListFields are dereferenced. |         """Ensure that DBRef items in ListFields are dereferenced. | ||||||
| @@ -760,3 +760,84 @@ class FieldTest(unittest.TestCase): | |||||||
|         UserB.drop_collection() |         UserB.drop_collection() | ||||||
|         UserC.drop_collection() |         UserC.drop_collection() | ||||||
|         Group.drop_collection() |         Group.drop_collection() | ||||||
|  | 
 | ||||||
|  |     def test_multidirectional_lists(self): | ||||||
|  | 
 | ||||||
|  |         class Asset(Document): | ||||||
|  |             name = StringField(max_length=250, required=True) | ||||||
|  |             parent = GenericReferenceField(default=None) | ||||||
|  |             parents = ListField(GenericReferenceField()) | ||||||
|  |             children = ListField(GenericReferenceField()) | ||||||
|  | 
 | ||||||
|  |         Asset.drop_collection() | ||||||
|  | 
 | ||||||
|  |         root = Asset(name='', path="/", title="Site Root") | ||||||
|  |         root.save() | ||||||
|  | 
 | ||||||
|  |         company = Asset(name='company', title='Company', parent=root, parents=[root]) | ||||||
|  |         company.save() | ||||||
|  | 
 | ||||||
|  |         root.children = [company] | ||||||
|  |         root.save() | ||||||
|  | 
 | ||||||
|  |         root = root.reload() | ||||||
|  |         self.assertEquals(root.children, [company]) | ||||||
|  |         self.assertEquals(company.parents, [root]) | ||||||
|  | 
 | ||||||
|  |     def test_dict_in_dbref_instance(self): | ||||||
|  | 
 | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField(max_length=250, required=True) | ||||||
|  | 
 | ||||||
|  |         class Room(Document): | ||||||
|  |             number = StringField(max_length=250, required=True) | ||||||
|  |             staffs_with_position = ListField(DictField()) | ||||||
|  | 
 | ||||||
|  |         Person.drop_collection() | ||||||
|  |         Room.drop_collection() | ||||||
|  | 
 | ||||||
|  |         bob = Person.objects.create(name='Bob') | ||||||
|  |         bob.save() | ||||||
|  |         sarah = Person.objects.create(name='Sarah') | ||||||
|  |         sarah.save() | ||||||
|  | 
 | ||||||
|  |         room_101 = Room.objects.create(number="101") | ||||||
|  |         room_101.staffs_with_position = [ | ||||||
|  |             {'position_key': 'window', 'staff': sarah}, | ||||||
|  |             {'position_key': 'door', 'staff': bob.to_dbref()}] | ||||||
|  |         room_101.save() | ||||||
|  | 
 | ||||||
|  |         room = Room.objects.first().select_related() | ||||||
|  |         self.assertEquals(room.staffs_with_position[0]['staff'], sarah) | ||||||
|  |         self.assertEquals(room.staffs_with_position[1]['staff'], bob) | ||||||
|  |      | ||||||
|  |     def test_document_reload_no_inheritance(self): | ||||||
|  |         class Foo(Document): | ||||||
|  |             meta = {'allow_inheritance': False} | ||||||
|  |             bar = ReferenceField('Bar') | ||||||
|  |             baz = ReferenceField('Baz') | ||||||
|  | 
 | ||||||
|  |         class Bar(Document): | ||||||
|  |             meta = {'allow_inheritance': False} | ||||||
|  |             msg = StringField(required=True, default='Blammo!') | ||||||
|  | 
 | ||||||
|  |         class Baz(Document): | ||||||
|  |             meta = {'allow_inheritance': False} | ||||||
|  |             msg = StringField(required=True, default='Kaboom!') | ||||||
|  | 
 | ||||||
|  |         Foo.drop_collection() | ||||||
|  |         Bar.drop_collection() | ||||||
|  |         Baz.drop_collection() | ||||||
|  | 
 | ||||||
|  |         bar = Bar() | ||||||
|  |         bar.save() | ||||||
|  |         baz = Baz() | ||||||
|  |         baz.save() | ||||||
|  |         foo = Foo() | ||||||
|  |         foo.bar = bar | ||||||
|  |         foo.baz = baz | ||||||
|  |         foo.save() | ||||||
|  |         foo.reload() | ||||||
|  | 
 | ||||||
|  |         self.assertEquals(type(foo.bar), Bar) | ||||||
|  |         self.assertEquals(type(foo.baz), Baz) | ||||||
| @@ -8,8 +8,14 @@ from mongoengine.django.shortcuts import get_document_or_404 | |||||||
| from django.http import Http404 | from django.http import Http404 | ||||||
| from django.template import Context, Template | from django.template import Context, Template | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
|  | from django.core.paginator import Paginator | ||||||
|  | 
 | ||||||
| settings.configure() | settings.configure() | ||||||
| 
 | 
 | ||||||
|  | from django.contrib.sessions.tests import SessionTestsMixin | ||||||
|  | from mongoengine.django.sessions import SessionStore, MongoSession | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| class QuerySetTest(unittest.TestCase): | class QuerySetTest(unittest.TestCase): | ||||||
| 
 | 
 | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
| @@ -67,3 +73,38 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234') |         self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234') | ||||||
|         self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk)) |         self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk)) | ||||||
| 
 | 
 | ||||||
|  |     def test_pagination(self): | ||||||
|  |         """Ensure that Pagination works as expected | ||||||
|  |         """ | ||||||
|  |         class Page(Document): | ||||||
|  |             name = StringField() | ||||||
|  | 
 | ||||||
|  |         Page.drop_collection() | ||||||
|  | 
 | ||||||
|  |         for i in xrange(1, 11): | ||||||
|  |             Page(name=str(i)).save() | ||||||
|  | 
 | ||||||
|  |         paginator = Paginator(Page.objects.all(), 2) | ||||||
|  | 
 | ||||||
|  |         t = Template("{% for i in page.object_list  %}{{ i.name }}:{% endfor %}") | ||||||
|  |         for p in paginator.page_range: | ||||||
|  |             d = {"page": paginator.page(p)} | ||||||
|  |             end = p * 2 | ||||||
|  |             start = end - 1 | ||||||
|  |             self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): | ||||||
|  |     backend = SessionStore | ||||||
|  | 
 | ||||||
|  |     def setUp(self): | ||||||
|  |         connect(db='mongoenginetest') | ||||||
|  |         MongoSession.drop_collection() | ||||||
|  |         super(MongoDBSessionTest, self).setUp() | ||||||
|  | 
 | ||||||
|  |     def test_first_save(self): | ||||||
|  |         session = SessionStore() | ||||||
|  |         session['test'] = True | ||||||
|  |         session.save() | ||||||
|  |         self.assertTrue('test' in session) | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										502
									
								
								tests/test_dynamic_document.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										502
									
								
								tests/test_dynamic_document.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,502 @@ | |||||||
|  | import unittest | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.connection import get_db | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DynamicDocTest(unittest.TestCase): | ||||||
|  |  | ||||||
|  |     def setUp(self): | ||||||
|  |         connect(db='mongoenginetest') | ||||||
|  |         self.db = get_db() | ||||||
|  |  | ||||||
|  |         class Person(DynamicDocument): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {'allow_inheritance': True} | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         self.Person = Person | ||||||
|  |  | ||||||
|  |     def test_simple_dynamic_document(self): | ||||||
|  |         """Ensures simple dynamic documents are saved correctly""" | ||||||
|  |  | ||||||
|  |         p = self.Person() | ||||||
|  |         p.name = "James" | ||||||
|  |         p.age = 34 | ||||||
|  |  | ||||||
|  |         self.assertEquals(p.to_mongo(), | ||||||
|  |             {"_types": ["Person"], "_cls": "Person", | ||||||
|  |              "name": "James", "age": 34} | ||||||
|  |         ) | ||||||
|  |  | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         self.assertEquals(self.Person.objects.first().age, 34) | ||||||
|  |  | ||||||
|  |         # Confirm no changes to self.Person | ||||||
|  |         self.assertFalse(hasattr(self.Person, 'age')) | ||||||
|  |  | ||||||
|  |     def test_dynamic_document_delta(self): | ||||||
|  |         """Ensures simple dynamic documents can delta correctly""" | ||||||
|  |         p = self.Person(name="James", age=34) | ||||||
|  |         self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {})) | ||||||
|  |  | ||||||
|  |         p.doc = 123 | ||||||
|  |         del(p.doc) | ||||||
|  |         self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1})) | ||||||
|  |  | ||||||
|  |     def test_change_scope_of_variable(self): | ||||||
|  |         """Test changing the scope of a dynamic field has no adverse effects""" | ||||||
|  |         p = self.Person() | ||||||
|  |         p.name = "Dean" | ||||||
|  |         p.misc = 22 | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         p = self.Person.objects.get() | ||||||
|  |         p.misc = {'hello': 'world'} | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         p = self.Person.objects.get() | ||||||
|  |         self.assertEquals(p.misc, {'hello': 'world'}) | ||||||
|  |  | ||||||
|  |     def test_delete_dynamic_field(self): | ||||||
|  |         """Test deleting a dynamic field works""" | ||||||
|  |         self.Person.drop_collection() | ||||||
|  |         p = self.Person() | ||||||
|  |         p.name = "Dean" | ||||||
|  |         p.misc = 22 | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         p = self.Person.objects.get() | ||||||
|  |         p.misc = {'hello': 'world'} | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         p = self.Person.objects.get() | ||||||
|  |         self.assertEquals(p.misc, {'hello': 'world'}) | ||||||
|  |         collection = self.db[self.Person._get_collection_name()] | ||||||
|  |         obj = collection.find_one() | ||||||
|  |         self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name']) | ||||||
|  |  | ||||||
|  |         del(p.misc) | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         p = self.Person.objects.get() | ||||||
|  |         self.assertFalse(hasattr(p, 'misc')) | ||||||
|  |  | ||||||
|  |         obj = collection.find_one() | ||||||
|  |         self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name']) | ||||||
|  |  | ||||||
|  |     def test_dynamic_document_queries(self): | ||||||
|  |         """Ensure we can query dynamic fields""" | ||||||
|  |         p = self.Person() | ||||||
|  |         p.name = "Dean" | ||||||
|  |         p.age = 22 | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         self.assertEquals(1, self.Person.objects(age=22).count()) | ||||||
|  |         p = self.Person.objects(age=22) | ||||||
|  |         p = p.get() | ||||||
|  |         self.assertEquals(22, p.age) | ||||||
|  |  | ||||||
|  |     def test_complex_dynamic_document_queries(self): | ||||||
|  |         class Person(DynamicDocument): | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |  | ||||||
|  |         p = Person(name="test") | ||||||
|  |         p.age = "ten" | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         p1 = Person(name="test1") | ||||||
|  |         p1.age = "less then ten and a half" | ||||||
|  |         p1.save() | ||||||
|  |  | ||||||
|  |         p2 = Person(name="test2") | ||||||
|  |         p2.age = 10 | ||||||
|  |         p2.save() | ||||||
|  |  | ||||||
|  |         self.assertEquals(Person.objects(age__icontains='ten').count(), 2) | ||||||
|  |         self.assertEquals(Person.objects(age__gte=10).count(), 1) | ||||||
|  |  | ||||||
|  |     def test_complex_data_lookups(self): | ||||||
|  |         """Ensure you can query dynamic document dynamic fields""" | ||||||
|  |         p = self.Person() | ||||||
|  |         p.misc = {'hello': 'world'} | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         self.assertEquals(1, self.Person.objects(misc__hello='world').count()) | ||||||
|  |  | ||||||
|  |     def test_inheritance(self): | ||||||
|  |         """Ensure that dynamic document plays nice with inheritance""" | ||||||
|  |         class Employee(self.Person): | ||||||
|  |             salary = IntField() | ||||||
|  |  | ||||||
|  |         Employee.drop_collection() | ||||||
|  |  | ||||||
|  |         self.assertTrue('name' in Employee._fields) | ||||||
|  |         self.assertTrue('salary' in Employee._fields) | ||||||
|  |         self.assertEqual(Employee._get_collection_name(), | ||||||
|  |                          self.Person._get_collection_name()) | ||||||
|  |  | ||||||
|  |         joe_bloggs = Employee() | ||||||
|  |         joe_bloggs.name = "Joe Bloggs" | ||||||
|  |         joe_bloggs.salary = 10 | ||||||
|  |         joe_bloggs.age = 20 | ||||||
|  |         joe_bloggs.save() | ||||||
|  |  | ||||||
|  |         self.assertEquals(1, self.Person.objects(age=20).count()) | ||||||
|  |         self.assertEquals(1, Employee.objects(age=20).count()) | ||||||
|  |  | ||||||
|  |         joe_bloggs = self.Person.objects.first() | ||||||
|  |         self.assertTrue(isinstance(joe_bloggs, Employee)) | ||||||
|  |  | ||||||
|  |     def test_embedded_dynamic_document(self): | ||||||
|  |         """Test dynamic embedded documents""" | ||||||
|  |         class Embedded(DynamicEmbeddedDocument): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Doc(DynamicDocument): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         Doc.drop_collection() | ||||||
|  |         doc = Doc() | ||||||
|  |  | ||||||
|  |         embedded_1 = Embedded() | ||||||
|  |         embedded_1.string_field = 'hello' | ||||||
|  |         embedded_1.int_field = 1 | ||||||
|  |         embedded_1.dict_field = {'hello': 'world'} | ||||||
|  |         embedded_1.list_field = ['1', 2, {'hello': 'world'}] | ||||||
|  |         doc.embedded_field = embedded_1 | ||||||
|  |  | ||||||
|  |         self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", | ||||||
|  |             "embedded_field": { | ||||||
|  |                 "_types": ['Embedded'], "_cls": "Embedded", | ||||||
|  |                 "string_field": "hello", | ||||||
|  |                 "int_field": 1, | ||||||
|  |                 "dict_field": {"hello": "world"}, | ||||||
|  |                 "list_field": ['1', 2, {'hello': 'world'}] | ||||||
|  |             } | ||||||
|  |         }) | ||||||
|  |         doc.save() | ||||||
|  |  | ||||||
|  |         doc = Doc.objects.first() | ||||||
|  |         self.assertEquals(doc.embedded_field.__class__, Embedded) | ||||||
|  |         self.assertEquals(doc.embedded_field.string_field, "hello") | ||||||
|  |         self.assertEquals(doc.embedded_field.int_field, 1) | ||||||
|  |         self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'}) | ||||||
|  |         self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}]) | ||||||
|  |  | ||||||
|  |     def test_complex_embedded_documents(self): | ||||||
|  |         """Test complex dynamic embedded documents setups""" | ||||||
|  |         class Embedded(DynamicEmbeddedDocument): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Doc(DynamicDocument): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         Doc.drop_collection() | ||||||
|  |         doc = Doc() | ||||||
|  |  | ||||||
|  |         embedded_1 = Embedded() | ||||||
|  |         embedded_1.string_field = 'hello' | ||||||
|  |         embedded_1.int_field = 1 | ||||||
|  |         embedded_1.dict_field = {'hello': 'world'} | ||||||
|  |  | ||||||
|  |         embedded_2 = Embedded() | ||||||
|  |         embedded_2.string_field = 'hello' | ||||||
|  |         embedded_2.int_field = 1 | ||||||
|  |         embedded_2.dict_field = {'hello': 'world'} | ||||||
|  |         embedded_2.list_field = ['1', 2, {'hello': 'world'}] | ||||||
|  |  | ||||||
|  |         embedded_1.list_field = ['1', 2, embedded_2] | ||||||
|  |         doc.embedded_field = embedded_1 | ||||||
|  |  | ||||||
|  |         self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", | ||||||
|  |             "embedded_field": { | ||||||
|  |                 "_types": ['Embedded'], "_cls": "Embedded", | ||||||
|  |                 "string_field": "hello", | ||||||
|  |                 "int_field": 1, | ||||||
|  |                 "dict_field": {"hello": "world"}, | ||||||
|  |                 "list_field": ['1', 2, | ||||||
|  |                     {"_types": ['Embedded'], "_cls": "Embedded", | ||||||
|  |                     "string_field": "hello", | ||||||
|  |                     "int_field": 1, | ||||||
|  |                     "dict_field": {"hello": "world"}, | ||||||
|  |                     "list_field": ['1', 2, {'hello': 'world'}]} | ||||||
|  |                 ] | ||||||
|  |             } | ||||||
|  |         }) | ||||||
|  |         doc.save() | ||||||
|  |         doc = Doc.objects.first() | ||||||
|  |         self.assertEquals(doc.embedded_field.__class__, Embedded) | ||||||
|  |         self.assertEquals(doc.embedded_field.string_field, "hello") | ||||||
|  |         self.assertEquals(doc.embedded_field.int_field, 1) | ||||||
|  |         self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'}) | ||||||
|  |         self.assertEquals(doc.embedded_field.list_field[0], '1') | ||||||
|  |         self.assertEquals(doc.embedded_field.list_field[1], 2) | ||||||
|  |  | ||||||
|  |         embedded_field = doc.embedded_field.list_field[2] | ||||||
|  |  | ||||||
|  |         self.assertEquals(embedded_field.__class__, Embedded) | ||||||
|  |         self.assertEquals(embedded_field.string_field, "hello") | ||||||
|  |         self.assertEquals(embedded_field.int_field, 1) | ||||||
|  |         self.assertEquals(embedded_field.dict_field, {'hello': 'world'}) | ||||||
|  |         self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}]) | ||||||
|  |  | ||||||
|  |     def test_delta_for_dynamic_documents(self): | ||||||
|  |         p = self.Person() | ||||||
|  |         p.name = "Dean" | ||||||
|  |         p.age = 22 | ||||||
|  |         p.save() | ||||||
|  |  | ||||||
|  |         p.age = 24 | ||||||
|  |         self.assertEquals(p.age, 24) | ||||||
|  |         self.assertEquals(p._get_changed_fields(), ['age']) | ||||||
|  |         self.assertEquals(p._delta(), ({'age': 24}, {})) | ||||||
|  |  | ||||||
|  |         p = self.Person.objects(age=22).get() | ||||||
|  |         p.age = 24 | ||||||
|  |         self.assertEquals(p.age, 24) | ||||||
|  |         self.assertEquals(p._get_changed_fields(), ['age']) | ||||||
|  |         self.assertEquals(p._delta(), ({'age': 24}, {})) | ||||||
|  |  | ||||||
|  |         p.save() | ||||||
|  |         self.assertEquals(1, self.Person.objects(age=24).count()) | ||||||
|  |  | ||||||
|  |     def test_delta(self): | ||||||
|  |  | ||||||
|  |         class Doc(DynamicDocument): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         Doc.drop_collection() | ||||||
|  |         doc = Doc() | ||||||
|  |         doc.save() | ||||||
|  |  | ||||||
|  |         doc = Doc.objects.first() | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), []) | ||||||
|  |         self.assertEquals(doc._delta(), ({}, {})) | ||||||
|  |  | ||||||
|  |         doc.string_field = 'hello' | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), ['string_field']) | ||||||
|  |         self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {})) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         doc.int_field = 1 | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), ['int_field']) | ||||||
|  |         self.assertEquals(doc._delta(), ({'int_field': 1}, {})) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         dict_value = {'hello': 'world', 'ping': 'pong'} | ||||||
|  |         doc.dict_field = dict_value | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), ['dict_field']) | ||||||
|  |         self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {})) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         list_value = ['1', 2, {'hello': 'world'}] | ||||||
|  |         doc.list_field = list_value | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), ['list_field']) | ||||||
|  |         self.assertEquals(doc._delta(), ({'list_field': list_value}, {})) | ||||||
|  |  | ||||||
|  |         # Test unsetting | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         doc.dict_field = {} | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), ['dict_field']) | ||||||
|  |         self.assertEquals(doc._delta(), ({}, {'dict_field': 1})) | ||||||
|  |  | ||||||
|  |         doc._changed_fields = [] | ||||||
|  |         doc.list_field = [] | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), ['list_field']) | ||||||
|  |         self.assertEquals(doc._delta(), ({}, {'list_field': 1})) | ||||||
|  |  | ||||||
|  |     def test_delta_recursive(self): | ||||||
|  |         """Testing deltaing works with dynamic documents""" | ||||||
|  |         class Embedded(DynamicEmbeddedDocument): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         class Doc(DynamicDocument): | ||||||
|  |             pass | ||||||
|  |  | ||||||
|  |         Doc.drop_collection() | ||||||
|  |         doc = Doc() | ||||||
|  |         doc.save() | ||||||
|  |  | ||||||
|  |         doc = Doc.objects.first() | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), []) | ||||||
|  |         self.assertEquals(doc._delta(), ({}, {})) | ||||||
|  |  | ||||||
|  |         embedded_1 = Embedded() | ||||||
|  |         embedded_1.string_field = 'hello' | ||||||
|  |         embedded_1.int_field = 1 | ||||||
|  |         embedded_1.dict_field = {'hello': 'world'} | ||||||
|  |         embedded_1.list_field = ['1', 2, {'hello': 'world'}] | ||||||
|  |         doc.embedded_field = embedded_1 | ||||||
|  |  | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), ['embedded_field']) | ||||||
|  |  | ||||||
|  |         embedded_delta = { | ||||||
|  |             'string_field': 'hello', | ||||||
|  |             'int_field': 1, | ||||||
|  |             'dict_field': {'hello': 'world'}, | ||||||
|  |             'list_field': ['1', 2, {'hello': 'world'}] | ||||||
|  |         } | ||||||
|  |         self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {})) | ||||||
|  |         embedded_delta.update({ | ||||||
|  |             '_types': ['Embedded'], | ||||||
|  |             '_cls': 'Embedded', | ||||||
|  |         }) | ||||||
|  |         self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {})) | ||||||
|  |  | ||||||
|  |         doc.save() | ||||||
|  |         doc.reload() | ||||||
|  |  | ||||||
|  |         doc.embedded_field.dict_field = {} | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field']) | ||||||
|  |         self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1})) | ||||||
|  |  | ||||||
|  |         self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1})) | ||||||
|  |         doc.save() | ||||||
|  |         doc.reload() | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field = [] | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) | ||||||
|  |         self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1})) | ||||||
|  |         self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1})) | ||||||
|  |         doc.save() | ||||||
|  |         doc.reload() | ||||||
|  |  | ||||||
|  |         embedded_2 = Embedded() | ||||||
|  |         embedded_2.string_field = 'hello' | ||||||
|  |         embedded_2.int_field = 1 | ||||||
|  |         embedded_2.dict_field = {'hello': 'world'} | ||||||
|  |         embedded_2.list_field = ['1', 2, {'hello': 'world'}] | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field = ['1', 2, embedded_2] | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) | ||||||
|  |         self.assertEquals(doc.embedded_field._delta(), ({ | ||||||
|  |             'list_field': ['1', 2, { | ||||||
|  |                 '_cls': 'Embedded', | ||||||
|  |                 '_types': ['Embedded'], | ||||||
|  |                 'string_field': 'hello', | ||||||
|  |                 'dict_field': {'hello': 'world'}, | ||||||
|  |                 'int_field': 1, | ||||||
|  |                 'list_field': ['1', 2, {'hello': 'world'}], | ||||||
|  |             }] | ||||||
|  |         }, {})) | ||||||
|  |  | ||||||
|  |         self.assertEquals(doc._delta(), ({ | ||||||
|  |             'embedded_field.list_field': ['1', 2, { | ||||||
|  |                 '_cls': 'Embedded', | ||||||
|  |                  '_types': ['Embedded'], | ||||||
|  |                  'string_field': 'hello', | ||||||
|  |                  'dict_field': {'hello': 'world'}, | ||||||
|  |                  'int_field': 1, | ||||||
|  |                  'list_field': ['1', 2, {'hello': 'world'}], | ||||||
|  |             }] | ||||||
|  |         }, {})) | ||||||
|  |         doc.save() | ||||||
|  |         doc.reload() | ||||||
|  |  | ||||||
|  |         self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, []) | ||||||
|  |         self.assertEquals(doc.embedded_field.list_field[0], '1') | ||||||
|  |         self.assertEquals(doc.embedded_field.list_field[1], 2) | ||||||
|  |         for k in doc.embedded_field.list_field[2]._fields: | ||||||
|  |             self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k]) | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field[2].string_field = 'world' | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field']) | ||||||
|  |         self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {})) | ||||||
|  |         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {})) | ||||||
|  |         doc.save() | ||||||
|  |         doc.reload() | ||||||
|  |         self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world') | ||||||
|  |  | ||||||
|  |         # Test multiple assignments | ||||||
|  |         doc.embedded_field.list_field[2].string_field = 'hello world' | ||||||
|  |         doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) | ||||||
|  |         self.assertEquals(doc.embedded_field._delta(), ({ | ||||||
|  |             'list_field': ['1', 2, { | ||||||
|  |             '_types': ['Embedded'], | ||||||
|  |             '_cls': 'Embedded', | ||||||
|  |             'string_field': 'hello world', | ||||||
|  |             'int_field': 1, | ||||||
|  |             'list_field': ['1', 2, {'hello': 'world'}], | ||||||
|  |             'dict_field': {'hello': 'world'}}]}, {})) | ||||||
|  |         self.assertEquals(doc._delta(), ({ | ||||||
|  |             'embedded_field.list_field': ['1', 2, { | ||||||
|  |                 '_types': ['Embedded'], | ||||||
|  |                 '_cls': 'Embedded', | ||||||
|  |                 'string_field': 'hello world', | ||||||
|  |                 'int_field': 1, | ||||||
|  |                 'list_field': ['1', 2, {'hello': 'world'}], | ||||||
|  |                 'dict_field': {'hello': 'world'}} | ||||||
|  |             ]}, {})) | ||||||
|  |         doc.save() | ||||||
|  |         doc.reload() | ||||||
|  |         self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world') | ||||||
|  |  | ||||||
|  |         # Test list native methods | ||||||
|  |         doc.embedded_field.list_field[2].list_field.pop(0) | ||||||
|  |         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {})) | ||||||
|  |         doc.save() | ||||||
|  |         doc.reload() | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field[2].list_field.append(1) | ||||||
|  |         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {})) | ||||||
|  |         doc.save() | ||||||
|  |         doc.reload() | ||||||
|  |         self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1]) | ||||||
|  |  | ||||||
|  |         doc.embedded_field.list_field[2].list_field.sort() | ||||||
|  |         doc.save() | ||||||
|  |         doc.reload() | ||||||
|  |         self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}]) | ||||||
|  |  | ||||||
|  |         del(doc.embedded_field.list_field[2].list_field[2]['hello']) | ||||||
|  |         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {})) | ||||||
|  |         doc.save() | ||||||
|  |         doc.reload() | ||||||
|  |  | ||||||
|  |         del(doc.embedded_field.list_field[2].list_field) | ||||||
|  |         self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1})) | ||||||
|  |  | ||||||
|  |         doc.save() | ||||||
|  |         doc.reload() | ||||||
|  |  | ||||||
|  |         doc.dict_field = {'embedded': embedded_1} | ||||||
|  |         doc.save() | ||||||
|  |         doc.reload() | ||||||
|  |  | ||||||
|  |         doc.dict_field['embedded'].string_field = 'Hello World' | ||||||
|  |         self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field']) | ||||||
|  |         self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {})) | ||||||
|  |  | ||||||
|  |     def test_indexes(self): | ||||||
|  |         """Ensure that indexes are used when meta[indexes] is specified. | ||||||
|  |         """ | ||||||
|  |         class BlogPost(DynamicDocument): | ||||||
|  |             meta = { | ||||||
|  |                 'indexes': [ | ||||||
|  |                     '-date', | ||||||
|  |                     ('category', '-date') | ||||||
|  |                 ], | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |         info = BlogPost.objects._collection.index_information() | ||||||
|  |         # _id, '-date', ('cat', 'date') | ||||||
|  |         # NB: there is no index on _types by itself, since | ||||||
|  |         # the indices on -date and tags will both contain | ||||||
|  |         # _types as first element in the key | ||||||
|  |         self.assertEqual(len(info), 3) | ||||||
|  |  | ||||||
|  |         # Indexes are lazy so use list() to perform query | ||||||
|  |         list(BlogPost.objects) | ||||||
|  |         info = BlogPost.objects._collection.index_information() | ||||||
|  |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|  |         self.assertTrue([('_types', 1), ('category', 1), ('date', -1)] | ||||||
|  |                         in info) | ||||||
|  |         self.assertTrue([('_types', 1), ('date', -1)] in info) | ||||||
| @@ -1,20 +1,29 @@ | |||||||
| import unittest |  | ||||||
| import datetime | import datetime | ||||||
| from decimal import Decimal | import os | ||||||
| 
 | import unittest | ||||||
| import pymongo | import uuid | ||||||
|  | import StringIO | ||||||
|  | import tempfile | ||||||
| import gridfs | import gridfs | ||||||
| 
 | 
 | ||||||
|  | from decimal import Decimal | ||||||
|  | 
 | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.connection import _get_db | from mongoengine.connection import get_db | ||||||
| from mongoengine.base import _document_registry, NotRegistered | from mongoengine.base import _document_registry, NotRegistered | ||||||
| 
 | 
 | ||||||
|  | TEST_IMAGE_PATH = os.path.join(os.path.dirname(__file__), 'mongoengine.png') | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| class FieldTest(unittest.TestCase): | class FieldTest(unittest.TestCase): | ||||||
| 
 | 
 | ||||||
|     def setUp(self): |     def setUp(self): | ||||||
|         connect(db='mongoenginetest') |         connect(db='mongoenginetest') | ||||||
|         self.db = _get_db() |         self.db = get_db() | ||||||
|  | 
 | ||||||
|  |     def tearDown(self): | ||||||
|  |         self.db.drop_collection('fs.files') | ||||||
|  |         self.db.drop_collection('fs.chunks') | ||||||
| 
 | 
 | ||||||
|     def test_default_values(self): |     def test_default_values(self): | ||||||
|         """Ensure that default field values are used when creating a document. |         """Ensure that default field values are used when creating a document. | ||||||
| @@ -44,6 +53,80 @@ class FieldTest(unittest.TestCase): | |||||||
|         person = Person(age=30) |         person = Person(age=30) | ||||||
|         self.assertRaises(ValidationError, person.validate) |         self.assertRaises(ValidationError, person.validate) | ||||||
| 
 | 
 | ||||||
|  |     def test_not_required_handles_none_in_update(self): | ||||||
|  |         """Ensure that every fields should accept None if required is False. | ||||||
|  |         """ | ||||||
|  | 
 | ||||||
|  |         class HandleNoneFields(Document): | ||||||
|  |             str_fld = StringField() | ||||||
|  |             int_fld = IntField() | ||||||
|  |             flt_fld = FloatField() | ||||||
|  |             comp_dt_fld = ComplexDateTimeField() | ||||||
|  | 
 | ||||||
|  |         HandleNoneFields.drop_collection() | ||||||
|  | 
 | ||||||
|  |         doc = HandleNoneFields() | ||||||
|  |         doc.str_fld = u'spam ham egg' | ||||||
|  |         doc.int_fld = 42 | ||||||
|  |         doc.flt_fld = 4.2 | ||||||
|  |         doc.com_dt_fld = datetime.datetime.utcnow() | ||||||
|  |         doc.save() | ||||||
|  | 
 | ||||||
|  |         res = HandleNoneFields.objects(id=doc.id).update( | ||||||
|  |             set__str_fld=None, | ||||||
|  |             set__int_fld=None, | ||||||
|  |             set__flt_fld=None, | ||||||
|  |             set__comp_dt_fld=None, | ||||||
|  |         ) | ||||||
|  |         self.assertEqual(res, 1) | ||||||
|  | 
 | ||||||
|  |         # Retrive data from db and verify it. | ||||||
|  |         ret = HandleNoneFields.objects.all()[0] | ||||||
|  |         self.assertEqual(ret.str_fld, None) | ||||||
|  |         self.assertEqual(ret.int_fld, None) | ||||||
|  |         self.assertEqual(ret.flt_fld, None) | ||||||
|  | 
 | ||||||
|  |         # Return current time if retrived value is None. | ||||||
|  |         self.assertTrue(isinstance(ret.comp_dt_fld, datetime.datetime)) | ||||||
|  | 
 | ||||||
|  |     def test_not_required_handles_none_from_database(self): | ||||||
|  |         """Ensure that every fields can handle null values from the database. | ||||||
|  |         """ | ||||||
|  | 
 | ||||||
|  |         class HandleNoneFields(Document): | ||||||
|  |             str_fld = StringField(required=True) | ||||||
|  |             int_fld = IntField(required=True) | ||||||
|  |             flt_fld = FloatField(required=True) | ||||||
|  |             comp_dt_fld = ComplexDateTimeField(required=True) | ||||||
|  | 
 | ||||||
|  |         HandleNoneFields.drop_collection() | ||||||
|  | 
 | ||||||
|  |         doc = HandleNoneFields() | ||||||
|  |         doc.str_fld = u'spam ham egg' | ||||||
|  |         doc.int_fld = 42 | ||||||
|  |         doc.flt_fld = 4.2 | ||||||
|  |         doc.com_dt_fld = datetime.datetime.utcnow() | ||||||
|  |         doc.save() | ||||||
|  | 
 | ||||||
|  |         collection = self.db[HandleNoneFields._get_collection_name()] | ||||||
|  |         obj = collection.update({"_id": doc.id}, {"$unset": { | ||||||
|  |             "str_fld": 1, | ||||||
|  |             "int_fld": 1, | ||||||
|  |             "flt_fld": 1, | ||||||
|  |             "comp_dt_fld": 1} | ||||||
|  |         }) | ||||||
|  | 
 | ||||||
|  |         # Retrive data from db and verify it. | ||||||
|  |         ret = HandleNoneFields.objects.all()[0] | ||||||
|  | 
 | ||||||
|  |         self.assertEqual(ret.str_fld, None) | ||||||
|  |         self.assertEqual(ret.int_fld, None) | ||||||
|  |         self.assertEqual(ret.flt_fld, None) | ||||||
|  |         # Return current time if retrived value is None. | ||||||
|  |         self.assert_(isinstance(ret.comp_dt_fld, datetime.datetime)) | ||||||
|  | 
 | ||||||
|  |         self.assertRaises(ValidationError, ret.validate) | ||||||
|  | 
 | ||||||
|     def test_object_id_validation(self): |     def test_object_id_validation(self): | ||||||
|         """Ensure that invalid values cannot be assigned to string fields. |         """Ensure that invalid values cannot be assigned to string fields. | ||||||
|         """ |         """ | ||||||
| @@ -175,6 +258,26 @@ class FieldTest(unittest.TestCase): | |||||||
|         person.admin = 'Yes' |         person.admin = 'Yes' | ||||||
|         self.assertRaises(ValidationError, person.validate) |         self.assertRaises(ValidationError, person.validate) | ||||||
| 
 | 
 | ||||||
|  |     def test_uuid_validation(self): | ||||||
|  |         """Ensure that invalid values cannot be assigned to UUID fields. | ||||||
|  |         """ | ||||||
|  |         class Person(Document): | ||||||
|  |             api_key = UUIDField() | ||||||
|  | 
 | ||||||
|  |         person = Person() | ||||||
|  |         # any uuid type is valid | ||||||
|  |         person.api_key = uuid.uuid4() | ||||||
|  |         person.validate() | ||||||
|  |         person.api_key = uuid.uuid1() | ||||||
|  |         person.validate() | ||||||
|  | 
 | ||||||
|  |         # last g cannot belong to an hex number | ||||||
|  |         person.api_key = '9d159858-549b-4975-9f98-dd2f987c113g' | ||||||
|  |         self.assertRaises(ValidationError, person.validate) | ||||||
|  |         # short strings don't validate | ||||||
|  |         person.api_key = '9d159858-549b-4975-9f98-dd2f987c113' | ||||||
|  |         self.assertRaises(ValidationError, person.validate) | ||||||
|  | 
 | ||||||
|     def test_datetime_validation(self): |     def test_datetime_validation(self): | ||||||
|         """Ensure that invalid values cannot be assigned to datetime fields. |         """Ensure that invalid values cannot be assigned to datetime fields. | ||||||
|         """ |         """ | ||||||
| @@ -337,27 +440,27 @@ class FieldTest(unittest.TestCase): | |||||||
|         logs = LogEntry.objects.order_by("date") |         logs = LogEntry.objects.order_by("date") | ||||||
|         count = logs.count() |         count = logs.count() | ||||||
|         i = 0 |         i = 0 | ||||||
|         while i == count-1: |         while i == count - 1: | ||||||
|             self.assertTrue(logs[i].date <= logs[i+1].date) |             self.assertTrue(logs[i].date <= logs[i + 1].date) | ||||||
|             i +=1 |             i += 1 | ||||||
| 
 | 
 | ||||||
|         logs = LogEntry.objects.order_by("-date") |         logs = LogEntry.objects.order_by("-date") | ||||||
|         count = logs.count() |         count = logs.count() | ||||||
|         i = 0 |         i = 0 | ||||||
|         while i == count-1: |         while i == count - 1: | ||||||
|             self.assertTrue(logs[i].date >= logs[i+1].date) |             self.assertTrue(logs[i].date >= logs[i + 1].date) | ||||||
|             i +=1 |             i += 1 | ||||||
| 
 | 
 | ||||||
|         # Test searching |         # Test searching | ||||||
|         logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980,1,1)) |         logs = LogEntry.objects.filter(date__gte=datetime.datetime(1980, 1, 1)) | ||||||
|         self.assertEqual(logs.count(), 30) |         self.assertEqual(logs.count(), 30) | ||||||
| 
 | 
 | ||||||
|         logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980,1,1)) |         logs = LogEntry.objects.filter(date__lte=datetime.datetime(1980, 1, 1)) | ||||||
|         self.assertEqual(logs.count(), 30) |         self.assertEqual(logs.count(), 30) | ||||||
| 
 | 
 | ||||||
|         logs = LogEntry.objects.filter( |         logs = LogEntry.objects.filter( | ||||||
|             date__lte=datetime.datetime(2011,1,1), |             date__lte=datetime.datetime(2011, 1, 1), | ||||||
|             date__gte=datetime.datetime(2000,1,1), |             date__gte=datetime.datetime(2000, 1, 1), | ||||||
|         ) |         ) | ||||||
|         self.assertEqual(logs.count(), 10) |         self.assertEqual(logs.count(), 10) | ||||||
| 
 | 
 | ||||||
| @@ -459,6 +562,31 @@ class FieldTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
| 
 | 
 | ||||||
|  |     def test_reverse_list_sorting(self): | ||||||
|  |         '''Ensure that a reverse sorted list field properly sorts values''' | ||||||
|  | 
 | ||||||
|  |         class Category(EmbeddedDocument): | ||||||
|  |             count = IntField() | ||||||
|  |             name = StringField() | ||||||
|  | 
 | ||||||
|  |         class CategoryList(Document): | ||||||
|  |             categories = SortedListField(EmbeddedDocumentField(Category), ordering='count', reverse=True) | ||||||
|  |             name = StringField() | ||||||
|  | 
 | ||||||
|  |         catlist = CategoryList(name="Top categories") | ||||||
|  |         cat1 = Category(name='posts', count=10) | ||||||
|  |         cat2 = Category(name='food', count=100) | ||||||
|  |         cat3 = Category(name='drink', count=40) | ||||||
|  |         catlist.categories = [cat1, cat2, cat3] | ||||||
|  |         catlist.save() | ||||||
|  |         catlist.reload() | ||||||
|  | 
 | ||||||
|  |         self.assertEqual(catlist.categories[0].name, cat2.name) | ||||||
|  |         self.assertEqual(catlist.categories[1].name, cat3.name) | ||||||
|  |         self.assertEqual(catlist.categories[2].name, cat1.name) | ||||||
|  | 
 | ||||||
|  |         CategoryList.drop_collection() | ||||||
|  | 
 | ||||||
|     def test_list_field(self): |     def test_list_field(self): | ||||||
|         """Ensure that list types work as expected. |         """Ensure that list types work as expected. | ||||||
|         """ |         """ | ||||||
| @@ -485,7 +613,6 @@ class FieldTest(unittest.TestCase): | |||||||
|         post.info = [{'test': 3}] |         post.info = [{'test': 3}] | ||||||
|         post.save() |         post.save() | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
|         self.assertEquals(BlogPost.objects.count(), 3) |         self.assertEquals(BlogPost.objects.count(), 3) | ||||||
|         self.assertEquals(BlogPost.objects.filter(info__exact='test').count(), 1) |         self.assertEquals(BlogPost.objects.filter(info__exact='test').count(), 1) | ||||||
|         self.assertEquals(BlogPost.objects.filter(info__0__test='test').count(), 1) |         self.assertEquals(BlogPost.objects.filter(info__0__test='test').count(), 1) | ||||||
| @@ -495,6 +622,21 @@ class FieldTest(unittest.TestCase): | |||||||
|         self.assertEquals(BlogPost.objects.filter(info__100__test__exact='test').count(), 0) |         self.assertEquals(BlogPost.objects.filter(info__100__test__exact='test').count(), 0) | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
| 
 | 
 | ||||||
|  |     def test_list_field_passed_in_value(self): | ||||||
|  |         class Foo(Document): | ||||||
|  |             bars = ListField(ReferenceField("Bar")) | ||||||
|  | 
 | ||||||
|  |         class Bar(Document): | ||||||
|  |             text = StringField() | ||||||
|  | 
 | ||||||
|  |         bar = Bar(text="hi") | ||||||
|  |         bar.save() | ||||||
|  | 
 | ||||||
|  |         foo = Foo(bars=[]) | ||||||
|  |         foo.bars.append(bar) | ||||||
|  |         self.assertEquals(repr(foo.bars), '[<Bar: Bar object>]') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|     def test_list_field_strict(self): |     def test_list_field_strict(self): | ||||||
|         """Ensure that list field handles validation if provided a strict field type.""" |         """Ensure that list field handles validation if provided a strict field type.""" | ||||||
| 
 | 
 | ||||||
| @@ -515,6 +657,39 @@ class FieldTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         Simple.drop_collection() |         Simple.drop_collection() | ||||||
| 
 | 
 | ||||||
|  |     def test_list_field_rejects_strings(self): | ||||||
|  |         """Strings aren't valid list field data types""" | ||||||
|  | 
 | ||||||
|  |         class Simple(Document): | ||||||
|  |             mapping = ListField() | ||||||
|  | 
 | ||||||
|  |         Simple.drop_collection() | ||||||
|  |         e = Simple() | ||||||
|  |         e.mapping = 'hello world' | ||||||
|  | 
 | ||||||
|  |         self.assertRaises(ValidationError, e.save) | ||||||
|  | 
 | ||||||
|  |     def test_complex_field_required(self): | ||||||
|  |         """Ensure required cant be None / Empty""" | ||||||
|  | 
 | ||||||
|  |         class Simple(Document): | ||||||
|  |             mapping = ListField(required=True) | ||||||
|  | 
 | ||||||
|  |         Simple.drop_collection() | ||||||
|  |         e = Simple() | ||||||
|  |         e.mapping = [] | ||||||
|  | 
 | ||||||
|  |         self.assertRaises(ValidationError, e.save) | ||||||
|  | 
 | ||||||
|  |         class Simple(Document): | ||||||
|  |             mapping = DictField(required=True) | ||||||
|  | 
 | ||||||
|  |         Simple.drop_collection() | ||||||
|  |         e = Simple() | ||||||
|  |         e.mapping = {} | ||||||
|  | 
 | ||||||
|  |         self.assertRaises(ValidationError, e.save) | ||||||
|  | 
 | ||||||
|     def test_list_field_complex(self): |     def test_list_field_complex(self): | ||||||
|         """Ensure that the list fields can handle the complex types.""" |         """Ensure that the list fields can handle the complex types.""" | ||||||
| 
 | 
 | ||||||
| @@ -582,6 +757,9 @@ class FieldTest(unittest.TestCase): | |||||||
|         post.info = {'the.title': 'test'} |         post.info = {'the.title': 'test'} | ||||||
|         self.assertRaises(ValidationError, post.validate) |         self.assertRaises(ValidationError, post.validate) | ||||||
| 
 | 
 | ||||||
|  |         post.info = {1: 'test'} | ||||||
|  |         self.assertRaises(ValidationError, post.validate) | ||||||
|  | 
 | ||||||
|         post.info = {'title': 'test'} |         post.info = {'title': 'test'} | ||||||
|         post.save() |         post.save() | ||||||
| 
 | 
 | ||||||
| @@ -600,6 +778,13 @@ class FieldTest(unittest.TestCase): | |||||||
|         # Confirm handles non strings or non existing keys |         # Confirm handles non strings or non existing keys | ||||||
|         self.assertEquals(BlogPost.objects.filter(info__details__test__exact=5).count(), 0) |         self.assertEquals(BlogPost.objects.filter(info__details__test__exact=5).count(), 0) | ||||||
|         self.assertEquals(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0) |         self.assertEquals(BlogPost.objects.filter(info__made_up__test__exact='test').count(), 0) | ||||||
|  | 
 | ||||||
|  |         post = BlogPost.objects.create(info={'title': 'original'}) | ||||||
|  |         post.info.update({'title': 'updated'}) | ||||||
|  |         post.save() | ||||||
|  |         post.reload() | ||||||
|  |         self.assertEquals('updated', post.info['title']) | ||||||
|  | 
 | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
| 
 | 
 | ||||||
|     def test_dictfield_strict(self): |     def test_dictfield_strict(self): | ||||||
| @@ -727,6 +912,48 @@ class FieldTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         Extensible.drop_collection() |         Extensible.drop_collection() | ||||||
| 
 | 
 | ||||||
|  |     def test_embedded_mapfield_db_field(self): | ||||||
|  | 
 | ||||||
|  |         class Embedded(EmbeddedDocument): | ||||||
|  |             number = IntField(default=0, db_field='i') | ||||||
|  | 
 | ||||||
|  |         class Test(Document): | ||||||
|  |             my_map = MapField(field=EmbeddedDocumentField(Embedded), db_field='x') | ||||||
|  | 
 | ||||||
|  |         Test.drop_collection() | ||||||
|  | 
 | ||||||
|  |         test = Test() | ||||||
|  |         test.my_map['DICTIONARY_KEY'] = Embedded(number=1) | ||||||
|  |         test.save() | ||||||
|  | 
 | ||||||
|  |         Test.objects.update_one(inc__my_map__DICTIONARY_KEY__number=1) | ||||||
|  | 
 | ||||||
|  |         test = Test.objects.get() | ||||||
|  |         self.assertEqual(test.my_map['DICTIONARY_KEY'].number, 2) | ||||||
|  |         doc = self.db.test.find_one() | ||||||
|  |         self.assertEqual(doc['x']['DICTIONARY_KEY']['i'], 2) | ||||||
|  | 
 | ||||||
|  |     def test_embedded_db_field(self): | ||||||
|  | 
 | ||||||
|  |         class Embedded(EmbeddedDocument): | ||||||
|  |             number = IntField(default=0, db_field='i') | ||||||
|  | 
 | ||||||
|  |         class Test(Document): | ||||||
|  |             embedded = EmbeddedDocumentField(Embedded, db_field='x') | ||||||
|  | 
 | ||||||
|  |         Test.drop_collection() | ||||||
|  | 
 | ||||||
|  |         test = Test() | ||||||
|  |         test.embedded = Embedded(number=1) | ||||||
|  |         test.save() | ||||||
|  | 
 | ||||||
|  |         Test.objects.update_one(inc__embedded__number=1) | ||||||
|  | 
 | ||||||
|  |         test = Test.objects.get() | ||||||
|  |         self.assertEqual(test.embedded.number, 2) | ||||||
|  |         doc = self.db.test.find_one() | ||||||
|  |         self.assertEqual(doc['x']['i'], 2) | ||||||
|  | 
 | ||||||
|     def test_embedded_document_validation(self): |     def test_embedded_document_validation(self): | ||||||
|         """Ensure that invalid embedded documents cannot be assigned to |         """Ensure that invalid embedded documents cannot be assigned to | ||||||
|         embedded document fields. |         embedded document fields. | ||||||
| @@ -942,15 +1169,29 @@ class FieldTest(unittest.TestCase): | |||||||
|         class Company(Document): |         class Company(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
| 
 | 
 | ||||||
|  |         Product.drop_collection() | ||||||
|  |         Company.drop_collection() | ||||||
|  | 
 | ||||||
|         ten_gen = Company(name='10gen') |         ten_gen = Company(name='10gen') | ||||||
|         ten_gen.save() |         ten_gen.save() | ||||||
|         mongodb = Product(name='MongoDB', company=ten_gen) |         mongodb = Product(name='MongoDB', company=ten_gen) | ||||||
|         mongodb.save() |         mongodb.save() | ||||||
| 
 | 
 | ||||||
|  |         me = Product(name='MongoEngine') | ||||||
|  |         me.save() | ||||||
|  | 
 | ||||||
|         obj = Product.objects(company=ten_gen).first() |         obj = Product.objects(company=ten_gen).first() | ||||||
|         self.assertEqual(obj, mongodb) |         self.assertEqual(obj, mongodb) | ||||||
|         self.assertEqual(obj.company, ten_gen) |         self.assertEqual(obj.company, ten_gen) | ||||||
| 
 | 
 | ||||||
|  |         obj = Product.objects(company=None).first() | ||||||
|  |         self.assertEqual(obj, me) | ||||||
|  | 
 | ||||||
|  |         obj, created = Product.objects.get_or_create(company=None) | ||||||
|  | 
 | ||||||
|  |         self.assertEqual(created, False) | ||||||
|  |         self.assertEqual(obj, me) | ||||||
|  | 
 | ||||||
|     def test_reference_query_conversion(self): |     def test_reference_query_conversion(self): | ||||||
|         """Ensure that ReferenceFields can be queried using objects and values |         """Ensure that ReferenceFields can be queried using objects and values | ||||||
|         of the type of the primary key of the referenced object. |         of the type of the primary key of the referenced object. | ||||||
| @@ -1062,7 +1303,6 @@ class FieldTest(unittest.TestCase): | |||||||
|         Post.drop_collection() |         Post.drop_collection() | ||||||
|         User.drop_collection() |         User.drop_collection() | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
|     def test_generic_reference_document_not_registered(self): |     def test_generic_reference_document_not_registered(self): | ||||||
|         """Ensure dereferencing out of the document registry throws a |         """Ensure dereferencing out of the document registry throws a | ||||||
|         `NotRegistered` error. |         `NotRegistered` error. | ||||||
| @@ -1089,7 +1329,7 @@ class FieldTest(unittest.TestCase): | |||||||
|         user = User.objects.first() |         user = User.objects.first() | ||||||
|         try: |         try: | ||||||
|             user.bookmarks |             user.bookmarks | ||||||
|             raise AssertionError, "Link was removed from the registry" |             raise AssertionError("Link was removed from the registry") | ||||||
|         except NotRegistered: |         except NotRegistered: | ||||||
|             pass |             pass | ||||||
| 
 | 
 | ||||||
| @@ -1108,6 +1348,74 @@ class FieldTest(unittest.TestCase): | |||||||
|         self.assertEquals(repr(Person.objects(city=None)), |         self.assertEquals(repr(Person.objects(city=None)), | ||||||
|                             "[<Person: Person object>]") |                             "[<Person: Person object>]") | ||||||
| 
 | 
 | ||||||
|  | 
 | ||||||
|  |     def test_generic_reference_choices(self): | ||||||
|  |         """Ensure that a GenericReferenceField can handle choices | ||||||
|  |         """ | ||||||
|  |         class Link(Document): | ||||||
|  |             title = StringField() | ||||||
|  | 
 | ||||||
|  |         class Post(Document): | ||||||
|  |             title = StringField() | ||||||
|  | 
 | ||||||
|  |         class Bookmark(Document): | ||||||
|  |             bookmark_object = GenericReferenceField(choices=(Post,)) | ||||||
|  | 
 | ||||||
|  |         Link.drop_collection() | ||||||
|  |         Post.drop_collection() | ||||||
|  |         Bookmark.drop_collection() | ||||||
|  | 
 | ||||||
|  |         link_1 = Link(title="Pitchfork") | ||||||
|  |         link_1.save() | ||||||
|  | 
 | ||||||
|  |         post_1 = Post(title="Behind the Scenes of the Pavement Reunion") | ||||||
|  |         post_1.save() | ||||||
|  | 
 | ||||||
|  |         bm = Bookmark(bookmark_object=link_1) | ||||||
|  |         self.assertRaises(ValidationError, bm.validate) | ||||||
|  | 
 | ||||||
|  |         bm = Bookmark(bookmark_object=post_1) | ||||||
|  |         bm.save() | ||||||
|  | 
 | ||||||
|  |         bm = Bookmark.objects.first() | ||||||
|  |         self.assertEqual(bm.bookmark_object, post_1) | ||||||
|  | 
 | ||||||
|  |     def test_generic_reference_list_choices(self): | ||||||
|  |         """Ensure that a ListField properly dereferences generic references and | ||||||
|  |         respects choices. | ||||||
|  |         """ | ||||||
|  |         class Link(Document): | ||||||
|  |             title = StringField() | ||||||
|  | 
 | ||||||
|  |         class Post(Document): | ||||||
|  |             title = StringField() | ||||||
|  | 
 | ||||||
|  |         class User(Document): | ||||||
|  |             bookmarks = ListField(GenericReferenceField(choices=(Post,))) | ||||||
|  | 
 | ||||||
|  |         Link.drop_collection() | ||||||
|  |         Post.drop_collection() | ||||||
|  |         User.drop_collection() | ||||||
|  | 
 | ||||||
|  |         link_1 = Link(title="Pitchfork") | ||||||
|  |         link_1.save() | ||||||
|  | 
 | ||||||
|  |         post_1 = Post(title="Behind the Scenes of the Pavement Reunion") | ||||||
|  |         post_1.save() | ||||||
|  | 
 | ||||||
|  |         user = User(bookmarks=[link_1]) | ||||||
|  |         self.assertRaises(ValidationError, user.validate) | ||||||
|  | 
 | ||||||
|  |         user = User(bookmarks=[post_1]) | ||||||
|  |         user.save() | ||||||
|  | 
 | ||||||
|  |         user = User.objects.first() | ||||||
|  |         self.assertEqual(user.bookmarks, [post_1]) | ||||||
|  | 
 | ||||||
|  |         Link.drop_collection() | ||||||
|  |         Post.drop_collection() | ||||||
|  |         User.drop_collection() | ||||||
|  | 
 | ||||||
|     def test_binary_fields(self): |     def test_binary_fields(self): | ||||||
|         """Ensure that binary fields can be stored and retrieved. |         """Ensure that binary fields can be stored and retrieved. | ||||||
|         """ |         """ | ||||||
| @@ -1213,6 +1521,53 @@ class FieldTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         Shirt.drop_collection() |         Shirt.drop_collection() | ||||||
| 
 | 
 | ||||||
|  |     def test_simple_choices_validation(self): | ||||||
|  |         """Ensure that value is in a container of allowed values. | ||||||
|  |         """ | ||||||
|  |         class Shirt(Document): | ||||||
|  |             size = StringField(max_length=3, choices=('S', 'M', 'L', 'XL', 'XXL')) | ||||||
|  | 
 | ||||||
|  |         Shirt.drop_collection() | ||||||
|  | 
 | ||||||
|  |         shirt = Shirt() | ||||||
|  |         shirt.validate() | ||||||
|  | 
 | ||||||
|  |         shirt.size = "S" | ||||||
|  |         shirt.validate() | ||||||
|  | 
 | ||||||
|  |         shirt.size = "XS" | ||||||
|  |         self.assertRaises(ValidationError, shirt.validate) | ||||||
|  | 
 | ||||||
|  |         Shirt.drop_collection() | ||||||
|  | 
 | ||||||
|  |     def test_simple_choices_get_field_display(self): | ||||||
|  |         """Test dynamic helper for returning the display value of a choices field. | ||||||
|  |         """ | ||||||
|  |         class Shirt(Document): | ||||||
|  |             size = StringField(max_length=3, choices=('S', 'M', 'L', 'XL', 'XXL')) | ||||||
|  |             style = StringField(max_length=3, choices=('Small', 'Baggy', 'wide'), default='Small') | ||||||
|  | 
 | ||||||
|  |         Shirt.drop_collection() | ||||||
|  | 
 | ||||||
|  |         shirt = Shirt() | ||||||
|  | 
 | ||||||
|  |         self.assertEqual(shirt.get_size_display(), None) | ||||||
|  |         self.assertEqual(shirt.get_style_display(), 'Small') | ||||||
|  | 
 | ||||||
|  |         shirt.size = "XXL" | ||||||
|  |         shirt.style = "Baggy" | ||||||
|  |         self.assertEqual(shirt.get_size_display(), 'XXL') | ||||||
|  |         self.assertEqual(shirt.get_style_display(), 'Baggy') | ||||||
|  | 
 | ||||||
|  |         # Set as Z - an invalid choice | ||||||
|  |         shirt.size = "Z" | ||||||
|  |         shirt.style = "Z" | ||||||
|  |         self.assertEqual(shirt.get_size_display(), 'Z') | ||||||
|  |         self.assertEqual(shirt.get_style_display(), 'Z') | ||||||
|  |         self.assertRaises(ValidationError, shirt.validate) | ||||||
|  | 
 | ||||||
|  |         Shirt.drop_collection() | ||||||
|  | 
 | ||||||
|     def test_file_fields(self): |     def test_file_fields(self): | ||||||
|         """Ensure that file fields can be written to and their data retrieved |         """Ensure that file fields can be written to and their data retrieved | ||||||
|         """ |         """ | ||||||
| @@ -1242,6 +1597,21 @@ class FieldTest(unittest.TestCase): | |||||||
|         self.assertEquals(result.file.read(), text) |         self.assertEquals(result.file.read(), text) | ||||||
|         self.assertEquals(result.file.content_type, content_type) |         self.assertEquals(result.file.content_type, content_type) | ||||||
|         result.file.delete() # Remove file from GridFS |         result.file.delete() # Remove file from GridFS | ||||||
|  |         PutFile.objects.delete() | ||||||
|  | 
 | ||||||
|  |         # Ensure file-like objects are stored | ||||||
|  |         putfile = PutFile() | ||||||
|  |         putstring = StringIO.StringIO() | ||||||
|  |         putstring.write(text) | ||||||
|  |         putstring.seek(0) | ||||||
|  |         putfile.file.put(putstring, content_type=content_type) | ||||||
|  |         putfile.save() | ||||||
|  |         putfile.validate() | ||||||
|  |         result = PutFile.objects.first() | ||||||
|  |         self.assertTrue(putfile == result) | ||||||
|  |         self.assertEquals(result.file.read(), text) | ||||||
|  |         self.assertEquals(result.file.content_type, content_type) | ||||||
|  |         result.file.delete() | ||||||
| 
 | 
 | ||||||
|         streamfile = StreamFile() |         streamfile = StreamFile() | ||||||
|         streamfile.file.new_file(content_type=content_type) |         streamfile.file.new_file(content_type=content_type) | ||||||
| @@ -1289,7 +1659,50 @@ class FieldTest(unittest.TestCase): | |||||||
|         # Make sure FileField is optional and not required |         # Make sure FileField is optional and not required | ||||||
|         class DemoFile(Document): |         class DemoFile(Document): | ||||||
|             file = FileField() |             file = FileField() | ||||||
|         d = DemoFile.objects.create() |         DemoFile.objects.create() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |     def test_file_field_no_default(self): | ||||||
|  | 
 | ||||||
|  |         class GridDocument(Document): | ||||||
|  |             the_file = FileField() | ||||||
|  | 
 | ||||||
|  |         GridDocument.drop_collection() | ||||||
|  | 
 | ||||||
|  |         with tempfile.TemporaryFile() as f: | ||||||
|  |             f.write("Hello World!") | ||||||
|  |             f.flush() | ||||||
|  | 
 | ||||||
|  |             # Test without default | ||||||
|  |             doc_a = GridDocument() | ||||||
|  |             doc_a.save() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |             doc_b = GridDocument.objects.with_id(doc_a.id) | ||||||
|  |             doc_b.the_file.replace(f, filename='doc_b') | ||||||
|  |             doc_b.save() | ||||||
|  |             self.assertNotEquals(doc_b.the_file.grid_id, None) | ||||||
|  | 
 | ||||||
|  |             # Test it matches | ||||||
|  |             doc_c = GridDocument.objects.with_id(doc_b.id) | ||||||
|  |             self.assertEquals(doc_b.the_file.grid_id, doc_c.the_file.grid_id) | ||||||
|  | 
 | ||||||
|  |             # Test with default | ||||||
|  |             doc_d = GridDocument(the_file='') | ||||||
|  |             doc_d.save() | ||||||
|  | 
 | ||||||
|  |             doc_e = GridDocument.objects.with_id(doc_d.id) | ||||||
|  |             self.assertEquals(doc_d.the_file.grid_id, doc_e.the_file.grid_id) | ||||||
|  | 
 | ||||||
|  |             doc_e.the_file.replace(f, filename='doc_e') | ||||||
|  |             doc_e.save() | ||||||
|  | 
 | ||||||
|  |             doc_f = GridDocument.objects.with_id(doc_e.id) | ||||||
|  |             self.assertEquals(doc_e.the_file.grid_id, doc_f.the_file.grid_id) | ||||||
|  | 
 | ||||||
|  |         db = GridDocument._get_db() | ||||||
|  |         grid_fs = gridfs.GridFS(db) | ||||||
|  |         self.assertEquals(['doc_b', 'doc_e'], grid_fs.list()) | ||||||
| 
 | 
 | ||||||
|     def test_file_uniqueness(self): |     def test_file_uniqueness(self): | ||||||
|         """Ensure that each instance of a FileField is unique |         """Ensure that each instance of a FileField is unique | ||||||
| @@ -1328,6 +1741,95 @@ class FieldTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         TestFile.drop_collection() |         TestFile.drop_collection() | ||||||
| 
 | 
 | ||||||
|  |     def test_image_field(self): | ||||||
|  | 
 | ||||||
|  |         class TestImage(Document): | ||||||
|  |             image = ImageField() | ||||||
|  | 
 | ||||||
|  |         TestImage.drop_collection() | ||||||
|  | 
 | ||||||
|  |         t = TestImage() | ||||||
|  |         t.image.put(open(TEST_IMAGE_PATH, 'r')) | ||||||
|  |         t.save() | ||||||
|  | 
 | ||||||
|  |         t = TestImage.objects.first() | ||||||
|  | 
 | ||||||
|  |         self.assertEquals(t.image.format, 'PNG') | ||||||
|  | 
 | ||||||
|  |         w, h = t.image.size | ||||||
|  |         self.assertEquals(w, 371) | ||||||
|  |         self.assertEquals(h, 76) | ||||||
|  | 
 | ||||||
|  |         t.image.delete() | ||||||
|  | 
 | ||||||
|  |     def test_image_field_resize(self): | ||||||
|  | 
 | ||||||
|  |         class TestImage(Document): | ||||||
|  |             image = ImageField(size=(185, 37)) | ||||||
|  | 
 | ||||||
|  |         TestImage.drop_collection() | ||||||
|  | 
 | ||||||
|  |         t = TestImage() | ||||||
|  |         t.image.put(open(TEST_IMAGE_PATH, 'r')) | ||||||
|  |         t.save() | ||||||
|  | 
 | ||||||
|  |         t = TestImage.objects.first() | ||||||
|  | 
 | ||||||
|  |         self.assertEquals(t.image.format, 'PNG') | ||||||
|  |         w, h = t.image.size | ||||||
|  | 
 | ||||||
|  |         self.assertEquals(w, 185) | ||||||
|  |         self.assertEquals(h, 37) | ||||||
|  | 
 | ||||||
|  |         t.image.delete() | ||||||
|  | 
 | ||||||
|  |     def test_image_field_thumbnail(self): | ||||||
|  | 
 | ||||||
|  |         class TestImage(Document): | ||||||
|  |             image = ImageField(thumbnail_size=(92, 18)) | ||||||
|  | 
 | ||||||
|  |         TestImage.drop_collection() | ||||||
|  | 
 | ||||||
|  |         t = TestImage() | ||||||
|  |         t.image.put(open(TEST_IMAGE_PATH, 'r')) | ||||||
|  |         t.save() | ||||||
|  | 
 | ||||||
|  |         t = TestImage.objects.first() | ||||||
|  | 
 | ||||||
|  |         self.assertEquals(t.image.thumbnail.format, 'PNG') | ||||||
|  |         self.assertEquals(t.image.thumbnail.width, 92) | ||||||
|  |         self.assertEquals(t.image.thumbnail.height, 18) | ||||||
|  | 
 | ||||||
|  |         t.image.delete() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |     def test_file_multidb(self): | ||||||
|  |         register_connection('testfiles', 'testfiles') | ||||||
|  |         class TestFile(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             file = FileField(db_alias="testfiles", | ||||||
|  |                              collection_name="macumba") | ||||||
|  | 
 | ||||||
|  |         TestFile.drop_collection() | ||||||
|  | 
 | ||||||
|  |         # delete old filesystem | ||||||
|  |         get_db("testfiles").macumba.files.drop() | ||||||
|  |         get_db("testfiles").macumba.chunks.drop() | ||||||
|  | 
 | ||||||
|  |         # First instance | ||||||
|  |         testfile = TestFile() | ||||||
|  |         testfile.name = "Hello, World!" | ||||||
|  |         testfile.file.put('Hello, World!', | ||||||
|  |                           name="hello.txt") | ||||||
|  |         testfile.save() | ||||||
|  | 
 | ||||||
|  |         data = get_db("testfiles").macumba.files.find_one() | ||||||
|  |         self.assertEquals(data.get('name'), 'hello.txt') | ||||||
|  | 
 | ||||||
|  |         testfile = TestFile.objects.first() | ||||||
|  |         self.assertEquals(testfile.file.read(), | ||||||
|  |                           'Hello, World!') | ||||||
|  | 
 | ||||||
|     def test_geo_indexes(self): |     def test_geo_indexes(self): | ||||||
|         """Ensure that indexes are created automatically for GeoPointFields. |         """Ensure that indexes are created automatically for GeoPointFields. | ||||||
|         """ |         """ | ||||||
| @@ -1488,7 +1990,6 @@ class FieldTest(unittest.TestCase): | |||||||
|         c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'}) |         c = self.db['mongoengine.counters'].find_one({'_id': 'animal.id'}) | ||||||
|         self.assertEqual(c['next'], 10) |         self.assertEqual(c['next'], 10) | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
|     def test_generic_embedded_document(self): |     def test_generic_embedded_document(self): | ||||||
|         class Car(EmbeddedDocument): |         class Car(EmbeddedDocument): | ||||||
|             name = StringField() |             name = StringField() | ||||||
| @@ -1501,6 +2002,8 @@ class FieldTest(unittest.TestCase): | |||||||
|             name = StringField() |             name = StringField() | ||||||
|             like = GenericEmbeddedDocumentField() |             like = GenericEmbeddedDocumentField() | ||||||
| 
 | 
 | ||||||
|  |         Person.drop_collection() | ||||||
|  | 
 | ||||||
|         person = Person(name='Test User') |         person = Person(name='Test User') | ||||||
|         person.like = Car(name='Fiat') |         person.like = Car(name='Fiat') | ||||||
|         person.save() |         person.save() | ||||||
| @@ -1514,5 +2017,103 @@ class FieldTest(unittest.TestCase): | |||||||
|         person = Person.objects.first() |         person = Person.objects.first() | ||||||
|         self.assertTrue(isinstance(person.like, Dish)) |         self.assertTrue(isinstance(person.like, Dish)) | ||||||
| 
 | 
 | ||||||
|  |     def test_generic_embedded_document_choices(self): | ||||||
|  |         """Ensure you can limit GenericEmbeddedDocument choices | ||||||
|  |         """ | ||||||
|  |         class Car(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  | 
 | ||||||
|  |         class Dish(EmbeddedDocument): | ||||||
|  |             food = StringField(required=True) | ||||||
|  |             number = IntField() | ||||||
|  | 
 | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             like = GenericEmbeddedDocumentField(choices=(Dish,)) | ||||||
|  | 
 | ||||||
|  |         Person.drop_collection() | ||||||
|  | 
 | ||||||
|  |         person = Person(name='Test User') | ||||||
|  |         person.like = Car(name='Fiat') | ||||||
|  |         self.assertRaises(ValidationError, person.validate) | ||||||
|  | 
 | ||||||
|  |         person.like = Dish(food="arroz", number=15) | ||||||
|  |         person.save() | ||||||
|  | 
 | ||||||
|  |         person = Person.objects.first() | ||||||
|  |         self.assertTrue(isinstance(person.like, Dish)) | ||||||
|  | 
 | ||||||
|  |     def test_generic_list_embedded_document_choices(self): | ||||||
|  |         """Ensure you can limit GenericEmbeddedDocument choices inside a list | ||||||
|  |         field | ||||||
|  |         """ | ||||||
|  |         class Car(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  | 
 | ||||||
|  |         class Dish(EmbeddedDocument): | ||||||
|  |             food = StringField(required=True) | ||||||
|  |             number = IntField() | ||||||
|  | 
 | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             likes = ListField(GenericEmbeddedDocumentField(choices=(Dish,))) | ||||||
|  | 
 | ||||||
|  |         Person.drop_collection() | ||||||
|  | 
 | ||||||
|  |         person = Person(name='Test User') | ||||||
|  |         person.likes = [Car(name='Fiat')] | ||||||
|  |         self.assertRaises(ValidationError, person.validate) | ||||||
|  | 
 | ||||||
|  |         person.likes = [Dish(food="arroz", number=15)] | ||||||
|  |         person.save() | ||||||
|  | 
 | ||||||
|  |         person = Person.objects.first() | ||||||
|  |         self.assertTrue(isinstance(person.likes[0], Dish)) | ||||||
|  | 
 | ||||||
|  |     def test_recursive_validation(self): | ||||||
|  |         """Ensure that a validation result to_dict is available. | ||||||
|  |         """ | ||||||
|  |         class Author(EmbeddedDocument): | ||||||
|  |             name = StringField(required=True) | ||||||
|  | 
 | ||||||
|  |         class Comment(EmbeddedDocument): | ||||||
|  |             author = EmbeddedDocumentField(Author, required=True) | ||||||
|  |             content = StringField(required=True) | ||||||
|  | 
 | ||||||
|  |         class Post(Document): | ||||||
|  |             title = StringField(required=True) | ||||||
|  |             comments = ListField(EmbeddedDocumentField(Comment)) | ||||||
|  | 
 | ||||||
|  |         bob = Author(name='Bob') | ||||||
|  |         post = Post(title='hello world') | ||||||
|  |         post.comments.append(Comment(content='hello', author=bob)) | ||||||
|  |         post.comments.append(Comment(author=bob)) | ||||||
|  | 
 | ||||||
|  |         try: | ||||||
|  |             post.validate() | ||||||
|  |         except ValidationError, error: | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  |         # ValidationError.errors property | ||||||
|  |         self.assertTrue(hasattr(error, 'errors')) | ||||||
|  |         self.assertTrue(isinstance(error.errors, dict)) | ||||||
|  |         self.assertTrue('comments' in error.errors) | ||||||
|  |         self.assertTrue(1 in error.errors['comments']) | ||||||
|  |         self.assertTrue(isinstance(error.errors['comments'][1]['content'], | ||||||
|  |                         ValidationError)) | ||||||
|  | 
 | ||||||
|  |         # ValidationError.schema property | ||||||
|  |         error_dict = error.to_dict() | ||||||
|  |         self.assertTrue(isinstance(error_dict, dict)) | ||||||
|  |         self.assertTrue('comments' in error_dict) | ||||||
|  |         self.assertTrue(1 in error_dict['comments']) | ||||||
|  |         self.assertTrue('content' in error_dict['comments'][1]) | ||||||
|  |         self.assertEquals(error_dict['comments'][1]['content'], | ||||||
|  |                           u'Field is required ("content")') | ||||||
|  | 
 | ||||||
|  |         post.comments[1].content = 'here we go' | ||||||
|  |         post.validate() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
| @@ -1,13 +1,14 @@ | |||||||
| # -*- coding: utf-8 -*- | # -*- coding: utf-8 -*- | ||||||
| import unittest | import unittest | ||||||
| import pymongo | import pymongo | ||||||
|  | from bson import ObjectId | ||||||
| from datetime import datetime, timedelta | from datetime import datetime, timedelta | ||||||
| 
 | 
 | ||||||
| from mongoengine.queryset import (QuerySet, QuerySetManager, | from mongoengine.queryset import (QuerySet, QuerySetManager, | ||||||
|                                   MultipleObjectsReturned, DoesNotExist, |                                   MultipleObjectsReturned, DoesNotExist, | ||||||
|                                   QueryFieldList) |                                   QueryFieldList) | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
| from mongoengine.connection import _get_connection | from mongoengine.connection import get_connection | ||||||
| from mongoengine.tests import query_counter | from mongoengine.tests import query_counter | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @@ -19,6 +20,7 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         class Person(Document): |         class Person(Document): | ||||||
|             name = StringField() |             name = StringField() | ||||||
|             age = IntField() |             age = IntField() | ||||||
|  |             meta = {'allow_inheritance': True} | ||||||
|         self.Person = Person |         self.Person = Person | ||||||
| 
 | 
 | ||||||
|     def test_initialisation(self): |     def test_initialisation(self): | ||||||
| @@ -59,8 +61,7 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         self.assertEqual(len(people), 2) |         self.assertEqual(len(people), 2) | ||||||
|         results = list(people) |         results = list(people) | ||||||
|         self.assertTrue(isinstance(results[0], self.Person)) |         self.assertTrue(isinstance(results[0], self.Person)) | ||||||
|         self.assertTrue(isinstance(results[0].id, (pymongo.objectid.ObjectId, |         self.assertTrue(isinstance(results[0].id, (ObjectId, str, unicode))) | ||||||
|                                                    str, unicode))) |  | ||||||
|         self.assertEqual(results[0].name, "User A") |         self.assertEqual(results[0].name, "User A") | ||||||
|         self.assertEqual(results[0].age, 20) |         self.assertEqual(results[0].age, 20) | ||||||
|         self.assertEqual(results[1].name, "User B") |         self.assertEqual(results[1].name, "User B") | ||||||
| @@ -110,6 +111,16 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         people = list(self.Person.objects[80000:80001]) |         people = list(self.Person.objects[80000:80001]) | ||||||
|         self.assertEqual(len(people), 0) |         self.assertEqual(len(people), 0) | ||||||
| 
 | 
 | ||||||
|  |         # Test larger slice __repr__ | ||||||
|  |         self.Person.objects.delete() | ||||||
|  |         for i in xrange(55): | ||||||
|  |             self.Person(name='A%s' % i, age=i).save() | ||||||
|  | 
 | ||||||
|  |         self.assertEqual(len(self.Person.objects), 55) | ||||||
|  |         self.assertEqual("Person object", "%s" % self.Person.objects[0]) | ||||||
|  |         self.assertEqual("[<Person: Person object>, <Person: Person object>]",  "%s" % self.Person.objects[1:3]) | ||||||
|  |         self.assertEqual("[<Person: Person object>, <Person: Person object>]",  "%s" % self.Person.objects[51:53]) | ||||||
|  | 
 | ||||||
|     def test_find_one(self): |     def test_find_one(self): | ||||||
|         """Ensure that a query using find_one returns a valid result. |         """Ensure that a query using find_one returns a valid result. | ||||||
|         """ |         """ | ||||||
| @@ -144,6 +155,8 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         person = self.Person.objects.with_id(person1.id) |         person = self.Person.objects.with_id(person1.id) | ||||||
|         self.assertEqual(person.name, "User A") |         self.assertEqual(person.name, "User A") | ||||||
| 
 | 
 | ||||||
|  |         self.assertRaises(InvalidQueryError, self.Person.objects(name="User A").with_id, person1.id) | ||||||
|  | 
 | ||||||
|     def test_find_only_one(self): |     def test_find_only_one(self): | ||||||
|         """Ensure that a query using ``get`` returns at most one result. |         """Ensure that a query using ``get`` returns at most one result. | ||||||
|         """ |         """ | ||||||
| @@ -316,11 +329,11 @@ class QuerySetTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         BlogPost(title="ABC", comments=[c1, c2]).save() |         BlogPost(title="ABC", comments=[c1, c2]).save() | ||||||
| 
 | 
 | ||||||
|         BlogPost.objects(comments__by="joe").update(inc__comments__S__votes=1) |         BlogPost.objects(comments__by="jane").update(inc__comments__S__votes=1) | ||||||
| 
 | 
 | ||||||
|         post = BlogPost.objects.first() |         post = BlogPost.objects.first() | ||||||
|         self.assertEquals(post.comments[0].by, 'joe') |         self.assertEquals(post.comments[1].by, 'jane') | ||||||
|         self.assertEquals(post.comments[0].votes, 4) |         self.assertEquals(post.comments[1].votes, 8) | ||||||
| 
 | 
 | ||||||
|         # Currently the $ operator only applies to the first matched item in |         # Currently the $ operator only applies to the first matched item in | ||||||
|         # the query |         # the query | ||||||
| @@ -368,6 +381,34 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         self.assertRaises(OperationError, update_nested) |         self.assertRaises(OperationError, update_nested) | ||||||
|         Simple.drop_collection() |         Simple.drop_collection() | ||||||
| 
 | 
 | ||||||
|  |     def test_update_using_positional_operator_embedded_document(self): | ||||||
|  |         """Ensure that the embedded documents can be updated using the positional | ||||||
|  |         operator.""" | ||||||
|  | 
 | ||||||
|  |         class Vote(EmbeddedDocument): | ||||||
|  |             score = IntField() | ||||||
|  | 
 | ||||||
|  |         class Comment(EmbeddedDocument): | ||||||
|  |             by = StringField() | ||||||
|  |             votes = EmbeddedDocumentField(Vote) | ||||||
|  | 
 | ||||||
|  |         class BlogPost(Document): | ||||||
|  |             title = StringField() | ||||||
|  |             comments = ListField(EmbeddedDocumentField(Comment)) | ||||||
|  | 
 | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  | 
 | ||||||
|  |         c1 = Comment(by="joe", votes=Vote(score=3)) | ||||||
|  |         c2 = Comment(by="jane", votes=Vote(score=7)) | ||||||
|  | 
 | ||||||
|  |         BlogPost(title="ABC", comments=[c1, c2]).save() | ||||||
|  | 
 | ||||||
|  |         BlogPost.objects(comments__by="joe").update(set__comments__S__votes=Vote(score=4)) | ||||||
|  | 
 | ||||||
|  |         post = BlogPost.objects.first() | ||||||
|  |         self.assertEquals(post.comments[0].by, 'joe') | ||||||
|  |         self.assertEquals(post.comments[0].votes.score, 4) | ||||||
|  | 
 | ||||||
|     def test_mapfield_update(self): |     def test_mapfield_update(self): | ||||||
|         """Ensure that the MapField can be updated.""" |         """Ensure that the MapField can be updated.""" | ||||||
|         class Member(EmbeddedDocument): |         class Member(EmbeddedDocument): | ||||||
| @@ -439,7 +480,7 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         self.assertEqual(person.name, "User C") |         self.assertEqual(person.name, "User C") | ||||||
| 
 | 
 | ||||||
|     def test_bulk_insert(self): |     def test_bulk_insert(self): | ||||||
|         """Ensure that query by array position works. |         """Ensure that bulk insert works | ||||||
|         """ |         """ | ||||||
| 
 | 
 | ||||||
|         class Comment(EmbeddedDocument): |         class Comment(EmbeddedDocument): | ||||||
| @@ -449,12 +490,15 @@ class QuerySetTest(unittest.TestCase): | |||||||
|             comments = ListField(EmbeddedDocumentField(Comment)) |             comments = ListField(EmbeddedDocumentField(Comment)) | ||||||
| 
 | 
 | ||||||
|         class Blog(Document): |         class Blog(Document): | ||||||
|             title = StringField() |             title = StringField(unique=True) | ||||||
|             tags = ListField(StringField()) |             tags = ListField(StringField()) | ||||||
|             posts = ListField(EmbeddedDocumentField(Post)) |             posts = ListField(EmbeddedDocumentField(Post)) | ||||||
| 
 | 
 | ||||||
|         Blog.drop_collection() |         Blog.drop_collection() | ||||||
| 
 | 
 | ||||||
|  |         # Recreates the collection | ||||||
|  |         self.assertEqual(0, Blog.objects.count()) | ||||||
|  | 
 | ||||||
|         with query_counter() as q: |         with query_counter() as q: | ||||||
|             self.assertEqual(q, 0) |             self.assertEqual(q, 0) | ||||||
| 
 | 
 | ||||||
| @@ -468,10 +512,10 @@ class QuerySetTest(unittest.TestCase): | |||||||
|                 blogs.append(Blog(title="post %s" % i, posts=[post1, post2])) |                 blogs.append(Blog(title="post %s" % i, posts=[post1, post2])) | ||||||
| 
 | 
 | ||||||
|             Blog.objects.insert(blogs, load_bulk=False) |             Blog.objects.insert(blogs, load_bulk=False) | ||||||
|             self.assertEqual(q, 2) # 1 for the inital connection and 1 for the insert |             self.assertEqual(q, 1) # 1 for the insert | ||||||
| 
 | 
 | ||||||
|             Blog.objects.insert(blogs) |             Blog.objects.insert(blogs) | ||||||
|             self.assertEqual(q, 4) # 1 for insert, and 1 for in bulk |             self.assertEqual(q, 3) # 1 for insert, and 1 for in bulk fetch (3 in total) | ||||||
| 
 | 
 | ||||||
|         Blog.drop_collection() |         Blog.drop_collection() | ||||||
| 
 | 
 | ||||||
| @@ -519,6 +563,23 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         obj_id = Blog.objects.insert(blog1, load_bulk=False) |         obj_id = Blog.objects.insert(blog1, load_bulk=False) | ||||||
|         self.assertEquals(obj_id.__class__.__name__, 'ObjectId') |         self.assertEquals(obj_id.__class__.__name__, 'ObjectId') | ||||||
| 
 | 
 | ||||||
|  |         Blog.drop_collection() | ||||||
|  |         post3 = Post(comments=[comment1, comment1]) | ||||||
|  |         blog1 = Blog(title="foo", posts=[post1, post2]) | ||||||
|  |         blog2 = Blog(title="bar", posts=[post2, post3]) | ||||||
|  |         blog3 = Blog(title="baz", posts=[post1, post2]) | ||||||
|  |         Blog.objects.insert([blog1, blog2]) | ||||||
|  | 
 | ||||||
|  |         def throw_operation_error_not_unique(): | ||||||
|  |             Blog.objects.insert([blog2, blog3], safe=True) | ||||||
|  | 
 | ||||||
|  |         self.assertRaises(OperationError, throw_operation_error_not_unique) | ||||||
|  |         self.assertEqual(Blog.objects.count(), 2) | ||||||
|  | 
 | ||||||
|  |         Blog.objects.insert([blog2, blog3], write_options={'continue_on_error': True}) | ||||||
|  |         self.assertEqual(Blog.objects.count(), 3) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|     def test_slave_okay(self): |     def test_slave_okay(self): | ||||||
|         """Ensures that a query can take slave_okay syntax |         """Ensures that a query can take slave_okay syntax | ||||||
|         """ |         """ | ||||||
| @@ -567,7 +628,13 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         people1 = [person for person in queryset] |         people1 = [person for person in queryset] | ||||||
|         people2 = [person for person in queryset] |         people2 = [person for person in queryset] | ||||||
| 
 | 
 | ||||||
|  |         # Check that it still works even if iteration is interrupted. | ||||||
|  |         for person in queryset: | ||||||
|  |             break | ||||||
|  |         people3 = [person for person in queryset] | ||||||
|  | 
 | ||||||
|         self.assertEqual(people1, people2) |         self.assertEqual(people1, people2) | ||||||
|  |         self.assertEqual(people1, people3) | ||||||
| 
 | 
 | ||||||
|     def test_repr_iteration(self): |     def test_repr_iteration(self): | ||||||
|         """Ensure that QuerySet __repr__ can handle loops |         """Ensure that QuerySet __repr__ can handle loops | ||||||
| @@ -1277,6 +1344,37 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         self.Person.objects(name='Test User').delete() |         self.Person.objects(name='Test User').delete() | ||||||
|         self.assertEqual(1, BlogPost.objects.count()) |         self.assertEqual(1, BlogPost.objects.count()) | ||||||
| 
 | 
 | ||||||
|  |     def test_reverse_delete_rule_cascade_self_referencing(self): | ||||||
|  |         """Ensure self-referencing CASCADE deletes do not result in infinite loop | ||||||
|  |         """ | ||||||
|  |         class Category(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             parent = ReferenceField('self', reverse_delete_rule=CASCADE) | ||||||
|  | 
 | ||||||
|  |         num_children = 3 | ||||||
|  |         base = Category(name='Root') | ||||||
|  |         base.save() | ||||||
|  | 
 | ||||||
|  |         # Create a simple parent-child tree | ||||||
|  |         for i in range(num_children): | ||||||
|  |             child_name = 'Child-%i' % i | ||||||
|  |             child = Category(name=child_name, parent=base) | ||||||
|  |             child.save() | ||||||
|  | 
 | ||||||
|  |             for i in range(num_children): | ||||||
|  |                 child_child_name = 'Child-Child-%i' % i | ||||||
|  |                 child_child = Category(name=child_child_name, parent=child) | ||||||
|  |                 child_child.save() | ||||||
|  | 
 | ||||||
|  |         tree_size = 1 + num_children + (num_children * num_children) | ||||||
|  |         self.assertEquals(tree_size, Category.objects.count()) | ||||||
|  |         self.assertEquals(num_children, Category.objects(parent=base).count()) | ||||||
|  | 
 | ||||||
|  |         # The delete should effectively wipe out the Category collection | ||||||
|  |         # without resulting in infinite parent-child cascade recursion | ||||||
|  |         base.delete() | ||||||
|  |         self.assertEquals(0, Category.objects.count()) | ||||||
|  | 
 | ||||||
|     def test_reverse_delete_rule_nullify(self): |     def test_reverse_delete_rule_nullify(self): | ||||||
|         """Ensure nullification of references to deleted documents. |         """Ensure nullification of references to deleted documents. | ||||||
|         """ |         """ | ||||||
| @@ -1321,6 +1419,36 @@ class QuerySetTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         self.assertRaises(OperationError, self.Person.objects.delete) |         self.assertRaises(OperationError, self.Person.objects.delete) | ||||||
| 
 | 
 | ||||||
|  |     def test_reverse_delete_rule_pull(self): | ||||||
|  |         """Ensure pulling of references to deleted documents. | ||||||
|  |         """ | ||||||
|  |         class BlogPost(Document): | ||||||
|  |             content = StringField() | ||||||
|  |             authors = ListField(ReferenceField(self.Person, | ||||||
|  |                 reverse_delete_rule=PULL)) | ||||||
|  | 
 | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |         self.Person.drop_collection() | ||||||
|  | 
 | ||||||
|  |         me = self.Person(name='Test User') | ||||||
|  |         me.save() | ||||||
|  | 
 | ||||||
|  |         someoneelse = self.Person(name='Some-one Else') | ||||||
|  |         someoneelse.save() | ||||||
|  | 
 | ||||||
|  |         post = BlogPost(content='Watching TV', authors=[me, someoneelse]) | ||||||
|  |         post.save() | ||||||
|  | 
 | ||||||
|  |         another = BlogPost(content='Chilling Out', authors=[someoneelse]) | ||||||
|  |         another.save() | ||||||
|  | 
 | ||||||
|  |         someoneelse.delete() | ||||||
|  |         post.reload() | ||||||
|  |         another.reload() | ||||||
|  | 
 | ||||||
|  |         self.assertEqual(post.authors, [me]) | ||||||
|  |         self.assertEqual(another.authors, []) | ||||||
|  | 
 | ||||||
|     def test_update(self): |     def test_update(self): | ||||||
|         """Ensure that atomic updates work properly. |         """Ensure that atomic updates work properly. | ||||||
|         """ |         """ | ||||||
| @@ -1371,20 +1499,68 @@ class QuerySetTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
| 
 | 
 | ||||||
|     def test_update_pull(self): |     def test_update_push_and_pull(self): | ||||||
|         """Ensure that the 'pull' update operation works correctly. |         """Ensure that the 'pull' update operation works correctly. | ||||||
|         """ |         """ | ||||||
|         class BlogPost(Document): |         class BlogPost(Document): | ||||||
|             slug = StringField() |             slug = StringField() | ||||||
|             tags = ListField(StringField()) |             tags = ListField(StringField()) | ||||||
| 
 | 
 | ||||||
|         post = BlogPost(slug="test", tags=['code', 'mongodb', 'code']) |         BlogPost.drop_collection() | ||||||
|  | 
 | ||||||
|  |         post = BlogPost(slug="test") | ||||||
|         post.save() |         post.save() | ||||||
| 
 | 
 | ||||||
|  |         BlogPost.objects.filter(id=post.id).update(push__tags="code") | ||||||
|  |         post.reload() | ||||||
|  |         self.assertEqual(post.tags, ["code"]) | ||||||
|  | 
 | ||||||
|  |         BlogPost.objects.filter(id=post.id).update(push_all__tags=["mongodb", "code"]) | ||||||
|  |         post.reload() | ||||||
|  |         self.assertEqual(post.tags, ["code", "mongodb", "code"]) | ||||||
|  | 
 | ||||||
|         BlogPost.objects(slug="test").update(pull__tags="code") |         BlogPost.objects(slug="test").update(pull__tags="code") | ||||||
|         post.reload() |         post.reload() | ||||||
|         self.assertTrue('code' not in post.tags) |         self.assertEqual(post.tags, ["mongodb"]) | ||||||
|         self.assertEqual(len(post.tags), 1) | 
 | ||||||
|  | 
 | ||||||
|  |         BlogPost.objects(slug="test").update(pull_all__tags=["mongodb", "code"]) | ||||||
|  |         post.reload() | ||||||
|  |         self.assertEqual(post.tags, []) | ||||||
|  | 
 | ||||||
|  |         BlogPost.objects(slug="test").update(__raw__={"$addToSet": {"tags": {"$each": ["code", "mongodb", "code"]}}}) | ||||||
|  |         post.reload() | ||||||
|  |         self.assertEqual(post.tags, ["code", "mongodb"]) | ||||||
|  | 
 | ||||||
|  |     def test_pull_nested(self): | ||||||
|  | 
 | ||||||
|  |         class User(Document): | ||||||
|  |             name = StringField() | ||||||
|  | 
 | ||||||
|  |         class Collaborator(EmbeddedDocument): | ||||||
|  |             user = StringField() | ||||||
|  | 
 | ||||||
|  |             def __unicode__(self): | ||||||
|  |                 return '%s' % self.user | ||||||
|  | 
 | ||||||
|  |         class Site(Document): | ||||||
|  |             name = StringField(max_length=75, unique=True, required=True) | ||||||
|  |             collaborators = ListField(EmbeddedDocumentField(Collaborator)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |         Site.drop_collection() | ||||||
|  | 
 | ||||||
|  |         c = Collaborator(user='Esteban') | ||||||
|  |         s = Site(name="test", collaborators=[c]) | ||||||
|  |         s.save() | ||||||
|  | 
 | ||||||
|  |         Site.objects(id=s.id).update_one(pull__collaborators__user='Esteban') | ||||||
|  |         self.assertEqual(Site.objects.first().collaborators, []) | ||||||
|  | 
 | ||||||
|  |         def pull_all(): | ||||||
|  |             Site.objects(id=s.id).update_one(pull_all__collaborators__user=['Ross']) | ||||||
|  | 
 | ||||||
|  |         self.assertRaises(InvalidQueryError, pull_all) | ||||||
| 
 | 
 | ||||||
|     def test_update_one_pop_generic_reference(self): |     def test_update_one_pop_generic_reference(self): | ||||||
| 
 | 
 | ||||||
| @@ -1449,6 +1625,37 @@ class QuerySetTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         BlogPost.drop_collection() |         BlogPost.drop_collection() | ||||||
| 
 | 
 | ||||||
|  | 
 | ||||||
|  |     def test_set_list_embedded_documents(self): | ||||||
|  | 
 | ||||||
|  |         class Author(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  | 
 | ||||||
|  |         class Message(Document): | ||||||
|  |             title = StringField() | ||||||
|  |             authors = ListField(EmbeddedDocumentField('Author')) | ||||||
|  | 
 | ||||||
|  |         Message.drop_collection() | ||||||
|  | 
 | ||||||
|  |         message = Message(title="hello", authors=[Author(name="Harry")]) | ||||||
|  |         message.save() | ||||||
|  | 
 | ||||||
|  |         Message.objects(authors__name="Harry").update_one( | ||||||
|  |             set__authors__S=Author(name="Ross")) | ||||||
|  | 
 | ||||||
|  |         message = message.reload() | ||||||
|  |         self.assertEquals(message.authors[0].name, "Ross") | ||||||
|  | 
 | ||||||
|  |         Message.objects(authors__name="Ross").update_one( | ||||||
|  |             set__authors=[Author(name="Harry"), | ||||||
|  |                           Author(name="Ross"), | ||||||
|  |                           Author(name="Adam")]) | ||||||
|  | 
 | ||||||
|  |         message = message.reload() | ||||||
|  |         self.assertEquals(message.authors[0].name, "Harry") | ||||||
|  |         self.assertEquals(message.authors[1].name, "Ross") | ||||||
|  |         self.assertEquals(message.authors[2].name, "Adam") | ||||||
|  | 
 | ||||||
|     def test_order_by(self): |     def test_order_by(self): | ||||||
|         """Ensure that QuerySets may be ordered. |         """Ensure that QuerySets may be ordered. | ||||||
|         """ |         """ | ||||||
| @@ -1840,6 +2047,35 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         freq = Person.objects.item_frequencies('city', normalize=True, map_reduce=True) |         freq = Person.objects.item_frequencies('city', normalize=True, map_reduce=True) | ||||||
|         self.assertEquals(freq, {'CRB': 0.5, None: 0.5}) |         self.assertEquals(freq, {'CRB': 0.5, None: 0.5}) | ||||||
| 
 | 
 | ||||||
|  |     def test_item_frequencies_with_null_embedded(self): | ||||||
|  |         class Data(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  | 
 | ||||||
|  |         class Extra(EmbeddedDocument): | ||||||
|  |             tag = StringField() | ||||||
|  | 
 | ||||||
|  |         class Person(Document): | ||||||
|  |             data = EmbeddedDocumentField(Data, required=True) | ||||||
|  |             extra = EmbeddedDocumentField(Extra) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |         Person.drop_collection() | ||||||
|  | 
 | ||||||
|  |         p = Person() | ||||||
|  |         p.data = Data(name="Wilson Jr") | ||||||
|  |         p.save() | ||||||
|  | 
 | ||||||
|  |         p = Person() | ||||||
|  |         p.data = Data(name="Wesley") | ||||||
|  |         p.extra = Extra(tag="friend") | ||||||
|  |         p.save() | ||||||
|  | 
 | ||||||
|  |         ot = Person.objects.item_frequencies('extra.tag', map_reduce=False) | ||||||
|  |         self.assertEquals(ot, {None: 1.0, u'friend': 1.0}) | ||||||
|  | 
 | ||||||
|  |         ot = Person.objects.item_frequencies('extra.tag', map_reduce=True) | ||||||
|  |         self.assertEquals(ot, {None: 1.0, u'friend': 1.0}) | ||||||
|  | 
 | ||||||
|     def test_average(self): |     def test_average(self): | ||||||
|         """Ensure that field can be averaged correctly. |         """Ensure that field can be averaged correctly. | ||||||
|         """ |         """ | ||||||
| @@ -1882,6 +2118,24 @@ class QuerySetTest(unittest.TestCase): | |||||||
|         self.assertEqual(set(self.Person.objects(age=30).distinct('name')), |         self.assertEqual(set(self.Person.objects(age=30).distinct('name')), | ||||||
|                          set(['Mr Orange', 'Mr Pink'])) |                          set(['Mr Orange', 'Mr Pink'])) | ||||||
| 
 | 
 | ||||||
|  |     def test_distinct_handles_references(self): | ||||||
|  |         class Foo(Document): | ||||||
|  |             bar = ReferenceField("Bar") | ||||||
|  | 
 | ||||||
|  |         class Bar(Document): | ||||||
|  |             text = StringField() | ||||||
|  | 
 | ||||||
|  |         Bar.drop_collection() | ||||||
|  |         Foo.drop_collection() | ||||||
|  | 
 | ||||||
|  |         bar = Bar(text="hi") | ||||||
|  |         bar.save() | ||||||
|  | 
 | ||||||
|  |         foo = Foo(bar=bar) | ||||||
|  |         foo.save() | ||||||
|  | 
 | ||||||
|  |         self.assertEquals(Foo.objects.distinct("bar"), [bar]) | ||||||
|  | 
 | ||||||
|     def test_custom_manager(self): |     def test_custom_manager(self): | ||||||
|         """Ensure that custom QuerySetManager instances work as expected. |         """Ensure that custom QuerySetManager instances work as expected. | ||||||
|         """ |         """ | ||||||
| @@ -2200,7 +2454,7 @@ class QuerySetTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         # check that polygon works for users who have a server >= 1.9 |         # check that polygon works for users who have a server >= 1.9 | ||||||
|         server_version = tuple( |         server_version = tuple( | ||||||
|             _get_connection().server_info()['version'].split('.') |             get_connection().server_info()['version'].split('.') | ||||||
|         ) |         ) | ||||||
|         required_version = tuple("1.9.0".split(".")) |         required_version = tuple("1.9.0".split(".")) | ||||||
|         if server_version >= required_version: |         if server_version >= required_version: | ||||||
| @@ -2569,6 +2823,265 @@ class QuerySetTest(unittest.TestCase): | |||||||
| 
 | 
 | ||||||
|         self.assertRaises(TypeError, invalid_where) |         self.assertRaises(TypeError, invalid_where) | ||||||
| 
 | 
 | ||||||
|  |     def test_scalar(self): | ||||||
|  | 
 | ||||||
|  |         class Organization(Document): | ||||||
|  |             id = ObjectIdField('_id') | ||||||
|  |             name = StringField() | ||||||
|  | 
 | ||||||
|  |         class User(Document): | ||||||
|  |             id = ObjectIdField('_id') | ||||||
|  |             name = StringField() | ||||||
|  |             organization = ObjectIdField() | ||||||
|  | 
 | ||||||
|  |         User.drop_collection() | ||||||
|  |         Organization.drop_collection() | ||||||
|  | 
 | ||||||
|  |         whitehouse = Organization(name="White House") | ||||||
|  |         whitehouse.save() | ||||||
|  |         User(name="Bob Dole", organization=whitehouse.id).save() | ||||||
|  | 
 | ||||||
|  |         # Efficient way to get all unique organization names for a given | ||||||
|  |         # set of users (Pretend this has additional filtering.) | ||||||
|  |         user_orgs = set(User.objects.scalar('organization')) | ||||||
|  |         orgs = Organization.objects(id__in=user_orgs).scalar('name') | ||||||
|  |         self.assertEqual(list(orgs), ['White House']) | ||||||
|  | 
 | ||||||
|  |         # Efficient for generating listings, too. | ||||||
|  |         orgs = Organization.objects.scalar('name').in_bulk(list(user_orgs)) | ||||||
|  |         user_map = User.objects.scalar('name', 'organization') | ||||||
|  |         user_listing = [(user, orgs[org]) for user, org in user_map] | ||||||
|  |         self.assertEqual([("Bob Dole", "White House")], user_listing) | ||||||
|  | 
 | ||||||
|  |     def test_scalar_simple(self): | ||||||
|  |         class TestDoc(Document): | ||||||
|  |             x = IntField() | ||||||
|  |             y = BooleanField() | ||||||
|  | 
 | ||||||
|  |         TestDoc.drop_collection() | ||||||
|  | 
 | ||||||
|  |         TestDoc(x=10, y=True).save() | ||||||
|  |         TestDoc(x=20, y=False).save() | ||||||
|  |         TestDoc(x=30, y=True).save() | ||||||
|  | 
 | ||||||
|  |         plist = list(TestDoc.objects.scalar('x', 'y')) | ||||||
|  | 
 | ||||||
|  |         self.assertEqual(len(plist), 3) | ||||||
|  |         self.assertEqual(plist[0], (10, True)) | ||||||
|  |         self.assertEqual(plist[1], (20, False)) | ||||||
|  |         self.assertEqual(plist[2], (30, True)) | ||||||
|  | 
 | ||||||
|  |         class UserDoc(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             age = IntField() | ||||||
|  | 
 | ||||||
|  |         UserDoc.drop_collection() | ||||||
|  | 
 | ||||||
|  |         UserDoc(name="Wilson Jr", age=19).save() | ||||||
|  |         UserDoc(name="Wilson", age=43).save() | ||||||
|  |         UserDoc(name="Eliana", age=37).save() | ||||||
|  |         UserDoc(name="Tayza", age=15).save() | ||||||
|  | 
 | ||||||
|  |         ulist = list(UserDoc.objects.scalar('name', 'age')) | ||||||
|  | 
 | ||||||
|  |         self.assertEqual(ulist, [ | ||||||
|  |                 (u'Wilson Jr', 19), | ||||||
|  |                 (u'Wilson', 43), | ||||||
|  |                 (u'Eliana', 37), | ||||||
|  |                 (u'Tayza', 15)]) | ||||||
|  | 
 | ||||||
|  |         ulist = list(UserDoc.objects.scalar('name').order_by('age')) | ||||||
|  | 
 | ||||||
|  |         self.assertEqual(ulist, [ | ||||||
|  |                 (u'Tayza'), | ||||||
|  |                 (u'Wilson Jr'), | ||||||
|  |                 (u'Eliana'), | ||||||
|  |                 (u'Wilson')]) | ||||||
|  | 
 | ||||||
|  |     def test_scalar_embedded(self): | ||||||
|  |         class Profile(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  |             age = IntField() | ||||||
|  | 
 | ||||||
|  |         class Locale(EmbeddedDocument): | ||||||
|  |             city = StringField() | ||||||
|  |             country = StringField() | ||||||
|  | 
 | ||||||
|  |         class Person(Document): | ||||||
|  |             profile = EmbeddedDocumentField(Profile) | ||||||
|  |             locale = EmbeddedDocumentField(Locale) | ||||||
|  | 
 | ||||||
|  |         Person.drop_collection() | ||||||
|  | 
 | ||||||
|  |         Person(profile=Profile(name="Wilson Jr", age=19), | ||||||
|  |                locale=Locale(city="Corumba-GO", country="Brazil")).save() | ||||||
|  | 
 | ||||||
|  |         Person(profile=Profile(name="Gabriel Falcao", age=23), | ||||||
|  |                locale=Locale(city="New York", country="USA")).save() | ||||||
|  | 
 | ||||||
|  |         Person(profile=Profile(name="Lincoln de souza", age=28), | ||||||
|  |                locale=Locale(city="Belo Horizonte", country="Brazil")).save() | ||||||
|  | 
 | ||||||
|  |         Person(profile=Profile(name="Walter cruz", age=30), | ||||||
|  |                locale=Locale(city="Brasilia", country="Brazil")).save() | ||||||
|  | 
 | ||||||
|  |         self.assertEqual( | ||||||
|  |             list(Person.objects.order_by('profile__age').scalar('profile__name')), | ||||||
|  |             [u'Wilson Jr', u'Gabriel Falcao', u'Lincoln de souza', u'Walter cruz']) | ||||||
|  | 
 | ||||||
|  |         ulist = list(Person.objects.order_by('locale.city') | ||||||
|  |                      .scalar('profile__name', 'profile__age', 'locale__city')) | ||||||
|  |         self.assertEqual(ulist, | ||||||
|  |                          [(u'Lincoln de souza', 28, u'Belo Horizonte'), | ||||||
|  |                           (u'Walter cruz', 30, u'Brasilia'), | ||||||
|  |                           (u'Wilson Jr', 19, u'Corumba-GO'), | ||||||
|  |                           (u'Gabriel Falcao', 23, u'New York')]) | ||||||
|  | 
 | ||||||
|  |     def test_scalar_decimal(self): | ||||||
|  |         from decimal import Decimal | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             rating = DecimalField() | ||||||
|  | 
 | ||||||
|  |         Person.drop_collection() | ||||||
|  |         Person(name="Wilson Jr", rating=Decimal('1.0')).save() | ||||||
|  | 
 | ||||||
|  |         ulist = list(Person.objects.scalar('name', 'rating')) | ||||||
|  |         self.assertEqual(ulist, [(u'Wilson Jr', Decimal('1.0'))]) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |     def test_scalar_reference_field(self): | ||||||
|  |         class State(Document): | ||||||
|  |             name = StringField() | ||||||
|  | 
 | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             state = ReferenceField(State) | ||||||
|  | 
 | ||||||
|  |         State.drop_collection() | ||||||
|  |         Person.drop_collection() | ||||||
|  | 
 | ||||||
|  |         s1 = State(name="Goias") | ||||||
|  |         s1.save() | ||||||
|  | 
 | ||||||
|  |         Person(name="Wilson JR", state=s1).save() | ||||||
|  | 
 | ||||||
|  |         plist = list(Person.objects.scalar('name', 'state')) | ||||||
|  |         self.assertEqual(plist, [(u'Wilson JR', s1)]) | ||||||
|  | 
 | ||||||
|  |     def test_scalar_generic_reference_field(self): | ||||||
|  |         class State(Document): | ||||||
|  |             name = StringField() | ||||||
|  | 
 | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             state = GenericReferenceField() | ||||||
|  | 
 | ||||||
|  |         State.drop_collection() | ||||||
|  |         Person.drop_collection() | ||||||
|  | 
 | ||||||
|  |         s1 = State(name="Goias") | ||||||
|  |         s1.save() | ||||||
|  | 
 | ||||||
|  |         Person(name="Wilson JR", state=s1).save() | ||||||
|  | 
 | ||||||
|  |         plist = list(Person.objects.scalar('name', 'state')) | ||||||
|  |         self.assertEqual(plist, [(u'Wilson JR', s1)]) | ||||||
|  | 
 | ||||||
|  |     def test_scalar_db_field(self): | ||||||
|  | 
 | ||||||
|  |         class TestDoc(Document): | ||||||
|  |             x = IntField() | ||||||
|  |             y = BooleanField() | ||||||
|  | 
 | ||||||
|  |         TestDoc.drop_collection() | ||||||
|  | 
 | ||||||
|  |         TestDoc(x=10, y=True).save() | ||||||
|  |         TestDoc(x=20, y=False).save() | ||||||
|  |         TestDoc(x=30, y=True).save() | ||||||
|  | 
 | ||||||
|  |         plist = list(TestDoc.objects.scalar('x', 'y')) | ||||||
|  |         self.assertEqual(len(plist), 3) | ||||||
|  |         self.assertEqual(plist[0], (10, True)) | ||||||
|  |         self.assertEqual(plist[1], (20, False)) | ||||||
|  |         self.assertEqual(plist[2], (30, True)) | ||||||
|  | 
 | ||||||
|  |     def test_scalar_cursor_behaviour(self): | ||||||
|  |         """Ensure that a query returns a valid set of results. | ||||||
|  |         """ | ||||||
|  |         person1 = self.Person(name="User A", age=20) | ||||||
|  |         person1.save() | ||||||
|  |         person2 = self.Person(name="User B", age=30) | ||||||
|  |         person2.save() | ||||||
|  | 
 | ||||||
|  |         # Find all people in the collection | ||||||
|  |         people = self.Person.objects.scalar('name') | ||||||
|  |         self.assertEqual(len(people), 2) | ||||||
|  |         results = list(people) | ||||||
|  |         self.assertEqual(results[0], "User A") | ||||||
|  |         self.assertEqual(results[1], "User B") | ||||||
|  | 
 | ||||||
|  |         # Use a query to filter the people found to just person1 | ||||||
|  |         people = self.Person.objects(age=20).scalar('name') | ||||||
|  |         self.assertEqual(len(people), 1) | ||||||
|  |         person = people.next() | ||||||
|  |         self.assertEqual(person, "User A") | ||||||
|  | 
 | ||||||
|  |         # Test limit | ||||||
|  |         people = list(self.Person.objects.limit(1).scalar('name')) | ||||||
|  |         self.assertEqual(len(people), 1) | ||||||
|  |         self.assertEqual(people[0], 'User A') | ||||||
|  | 
 | ||||||
|  |         # Test skip | ||||||
|  |         people = list(self.Person.objects.skip(1).scalar('name')) | ||||||
|  |         self.assertEqual(len(people), 1) | ||||||
|  |         self.assertEqual(people[0], 'User B') | ||||||
|  | 
 | ||||||
|  |         person3 = self.Person(name="User C", age=40) | ||||||
|  |         person3.save() | ||||||
|  | 
 | ||||||
|  |         # Test slice limit | ||||||
|  |         people = list(self.Person.objects[:2].scalar('name')) | ||||||
|  |         self.assertEqual(len(people), 2) | ||||||
|  |         self.assertEqual(people[0], 'User A') | ||||||
|  |         self.assertEqual(people[1], 'User B') | ||||||
|  | 
 | ||||||
|  |         # Test slice skip | ||||||
|  |         people = list(self.Person.objects[1:].scalar('name')) | ||||||
|  |         self.assertEqual(len(people), 2) | ||||||
|  |         self.assertEqual(people[0], 'User B') | ||||||
|  |         self.assertEqual(people[1], 'User C') | ||||||
|  | 
 | ||||||
|  |         # Test slice limit and skip | ||||||
|  |         people = list(self.Person.objects[1:2].scalar('name')) | ||||||
|  |         self.assertEqual(len(people), 1) | ||||||
|  |         self.assertEqual(people[0], 'User B') | ||||||
|  | 
 | ||||||
|  |         people = list(self.Person.objects[1:1].scalar('name')) | ||||||
|  |         self.assertEqual(len(people), 0) | ||||||
|  | 
 | ||||||
|  |         # Test slice out of range | ||||||
|  |         people = list(self.Person.objects.scalar('name')[80000:80001]) | ||||||
|  |         self.assertEqual(len(people), 0) | ||||||
|  | 
 | ||||||
|  |         # Test larger slice __repr__ | ||||||
|  |         self.Person.objects.delete() | ||||||
|  |         for i in xrange(55): | ||||||
|  |             self.Person(name='A%s' % i, age=i).save() | ||||||
|  | 
 | ||||||
|  |         self.assertEqual(len(self.Person.objects.scalar('name')), 55) | ||||||
|  |         self.assertEqual("A0", "%s" % self.Person.objects.order_by('name').scalar('name').first()) | ||||||
|  |         self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').order_by('name')[0]) | ||||||
|  |         self.assertEqual("[u'A1', u'A2']",  "%s" % self.Person.objects.order_by('age').scalar('name')[1:3]) | ||||||
|  |         self.assertEqual("[u'A51', u'A52']",  "%s" % self.Person.objects.order_by('age').scalar('name')[51:53]) | ||||||
|  | 
 | ||||||
|  |         # with_id and in_bulk | ||||||
|  |         person = self.Person.objects.order_by('name').first() | ||||||
|  |         self.assertEqual("A0", "%s" % self.Person.objects.scalar('name').with_id(person.id)) | ||||||
|  | 
 | ||||||
|  |         pks = self.Person.objects.order_by('age').scalar('pk')[1:3] | ||||||
|  |         self.assertEqual("[u'A1', u'A2']",  "%s" % sorted(self.Person.objects.scalar('name').in_bulk(list(pks)).values())) | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| class QTest(unittest.TestCase): | class QTest(unittest.TestCase): | ||||||
| 
 | 
 | ||||||
| @@ -2790,6 +3303,30 @@ class QueryFieldListTest(unittest.TestCase): | |||||||
|         q += QueryFieldList(fields=['a'], value={"$slice": 5}) |         q += QueryFieldList(fields=['a'], value={"$slice": 5}) | ||||||
|         self.assertEqual(q.as_dict(), {'a': {"$slice": 5}}) |         self.assertEqual(q.as_dict(), {'a': {"$slice": 5}}) | ||||||
| 
 | 
 | ||||||
|  |     def test_elem_match(self): | ||||||
|  |         class Foo(EmbeddedDocument): | ||||||
|  |             shape = StringField() | ||||||
|  |             color = StringField() | ||||||
|  |             trick = BooleanField() | ||||||
|  |             meta = {'allow_inheritance': False} | ||||||
|  | 
 | ||||||
|  |         class Bar(Document): | ||||||
|  |             foo = ListField(EmbeddedDocumentField(Foo)) | ||||||
|  |             meta = {'allow_inheritance': False} | ||||||
|  | 
 | ||||||
|  |         Bar.drop_collection() | ||||||
|  | 
 | ||||||
|  |         b1 = Bar(foo=[Foo(shape= "square", color ="purple", thick = False), | ||||||
|  |                       Foo(shape= "circle", color ="red", thick = True)]) | ||||||
|  |         b1.save() | ||||||
|  | 
 | ||||||
|  |         b2 = Bar(foo=[Foo(shape= "square", color ="red", thick = True), | ||||||
|  |                       Foo(shape= "circle", color ="purple", thick = False)]) | ||||||
|  |         b2.save() | ||||||
|  | 
 | ||||||
|  |         ak = list(Bar.objects(foo__match={'shape': "square", "color": "purple"})) | ||||||
|  |         self.assertEqual([b1], ak) | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| if __name__ == '__main__': | if __name__ == '__main__': | ||||||
|     unittest.main() |     unittest.main() | ||||||
							
								
								
									
										32
									
								
								tests/test_replicaset_connection.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										32
									
								
								tests/test_replicaset_connection.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,32 @@ | |||||||
|  | import unittest | ||||||
|  | import pymongo | ||||||
|  | from pymongo import ReadPreference, ReplicaSetConnection | ||||||
|  |  | ||||||
|  | import mongoengine | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.connection import get_db, get_connection, ConnectionError | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class ConnectionTest(unittest.TestCase): | ||||||
|  |  | ||||||
|  |     def tearDown(self): | ||||||
|  |         mongoengine.connection._connection_settings = {} | ||||||
|  |         mongoengine.connection._connections = {} | ||||||
|  |         mongoengine.connection._dbs = {} | ||||||
|  |  | ||||||
|  |     def test_replicaset_uri_passes_read_preference(self): | ||||||
|  |         """Requires a replica set called "rs" on port 27017 | ||||||
|  |         """ | ||||||
|  |  | ||||||
|  |         try: | ||||||
|  |             conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=ReadPreference.SECONDARY_ONLY) | ||||||
|  |         except ConnectionError, e: | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         if not isinstance(conn, ReplicaSetConnection): | ||||||
|  |             return | ||||||
|  |  | ||||||
|  |         self.assertEquals(conn.read_preference, ReadPreference.SECONDARY_ONLY) | ||||||
|  |  | ||||||
|  | if __name__ == '__main__': | ||||||
|  |     unittest.main() | ||||||
| @@ -56,6 +56,18 @@ class SignalTests(unittest.TestCase): | |||||||
|             @classmethod |             @classmethod | ||||||
|             def post_delete(cls, sender, document, **kwargs): |             def post_delete(cls, sender, document, **kwargs): | ||||||
|                 signal_output.append('post_delete signal, %s' % document) |                 signal_output.append('post_delete signal, %s' % document) | ||||||
|  | 
 | ||||||
|  |             @classmethod | ||||||
|  |             def pre_bulk_insert(cls, sender, documents, **kwargs): | ||||||
|  |                 signal_output.append('pre_bulk_insert signal, %s' % documents) | ||||||
|  | 
 | ||||||
|  |             @classmethod | ||||||
|  |             def post_bulk_insert(cls, sender, documents, **kwargs): | ||||||
|  |                 signal_output.append('post_bulk_insert signal, %s' % documents) | ||||||
|  |                 if kwargs.get('loaded', False): | ||||||
|  |                     signal_output.append('Is loaded') | ||||||
|  |                 else: | ||||||
|  |                     signal_output.append('Not loaded') | ||||||
|         self.Author = Author |         self.Author = Author | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @@ -104,7 +116,9 @@ class SignalTests(unittest.TestCase): | |||||||
|             len(signals.pre_save.receivers), |             len(signals.pre_save.receivers), | ||||||
|             len(signals.post_save.receivers), |             len(signals.post_save.receivers), | ||||||
|             len(signals.pre_delete.receivers), |             len(signals.pre_delete.receivers), | ||||||
|             len(signals.post_delete.receivers) |             len(signals.post_delete.receivers), | ||||||
|  |             len(signals.pre_bulk_insert.receivers), | ||||||
|  |             len(signals.post_bulk_insert.receivers), | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         signals.pre_init.connect(Author.pre_init, sender=Author) |         signals.pre_init.connect(Author.pre_init, sender=Author) | ||||||
| @@ -113,6 +127,8 @@ class SignalTests(unittest.TestCase): | |||||||
|         signals.post_save.connect(Author.post_save, sender=Author) |         signals.post_save.connect(Author.post_save, sender=Author) | ||||||
|         signals.pre_delete.connect(Author.pre_delete, sender=Author) |         signals.pre_delete.connect(Author.pre_delete, sender=Author) | ||||||
|         signals.post_delete.connect(Author.post_delete, sender=Author) |         signals.post_delete.connect(Author.post_delete, sender=Author) | ||||||
|  |         signals.pre_bulk_insert.connect(Author.pre_bulk_insert, sender=Author) | ||||||
|  |         signals.post_bulk_insert.connect(Author.post_bulk_insert, sender=Author) | ||||||
| 
 | 
 | ||||||
|         signals.pre_init.connect(Another.pre_init, sender=Another) |         signals.pre_init.connect(Another.pre_init, sender=Another) | ||||||
|         signals.post_init.connect(Another.post_init, sender=Another) |         signals.post_init.connect(Another.post_init, sender=Another) | ||||||
| @@ -128,6 +144,8 @@ class SignalTests(unittest.TestCase): | |||||||
|         signals.pre_delete.disconnect(self.Author.pre_delete) |         signals.pre_delete.disconnect(self.Author.pre_delete) | ||||||
|         signals.post_save.disconnect(self.Author.post_save) |         signals.post_save.disconnect(self.Author.post_save) | ||||||
|         signals.pre_save.disconnect(self.Author.pre_save) |         signals.pre_save.disconnect(self.Author.pre_save) | ||||||
|  |         signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert) | ||||||
|  |         signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert) | ||||||
| 
 | 
 | ||||||
|         signals.pre_init.disconnect(self.Another.pre_init) |         signals.pre_init.disconnect(self.Another.pre_init) | ||||||
|         signals.post_init.disconnect(self.Another.post_init) |         signals.post_init.disconnect(self.Another.post_init) | ||||||
| @@ -143,7 +161,9 @@ class SignalTests(unittest.TestCase): | |||||||
|             len(signals.pre_save.receivers), |             len(signals.pre_save.receivers), | ||||||
|             len(signals.post_save.receivers), |             len(signals.post_save.receivers), | ||||||
|             len(signals.pre_delete.receivers), |             len(signals.pre_delete.receivers), | ||||||
|             len(signals.post_delete.receivers) |             len(signals.post_delete.receivers), | ||||||
|  |             len(signals.pre_bulk_insert.receivers), | ||||||
|  |             len(signals.post_bulk_insert.receivers), | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         self.assertEqual(self.pre_signals, post_signals) |         self.assertEqual(self.pre_signals, post_signals) | ||||||
| @@ -154,6 +174,14 @@ class SignalTests(unittest.TestCase): | |||||||
|         def create_author(): |         def create_author(): | ||||||
|             a1 = self.Author(name='Bill Shakespeare') |             a1 = self.Author(name='Bill Shakespeare') | ||||||
| 
 | 
 | ||||||
|  |         def bulk_create_author_with_load(): | ||||||
|  |             a1 = self.Author(name='Bill Shakespeare') | ||||||
|  |             self.Author.objects.insert([a1], load_bulk=True) | ||||||
|  | 
 | ||||||
|  |         def bulk_create_author_without_load(): | ||||||
|  |             a1 = self.Author(name='Bill Shakespeare') | ||||||
|  |             self.Author.objects.insert([a1], load_bulk=False) | ||||||
|  | 
 | ||||||
|         self.assertEqual(self.get_signal_output(create_author), [ |         self.assertEqual(self.get_signal_output(create_author), [ | ||||||
|             "pre_init signal, Author", |             "pre_init signal, Author", | ||||||
|             "{'name': 'Bill Shakespeare'}", |             "{'name': 'Bill Shakespeare'}", | ||||||
| @@ -179,3 +207,24 @@ class SignalTests(unittest.TestCase): | |||||||
|             'pre_delete signal, William Shakespeare', |             'pre_delete signal, William Shakespeare', | ||||||
|             'post_delete signal, William Shakespeare', |             'post_delete signal, William Shakespeare', | ||||||
|         ]) |         ]) | ||||||
|  | 
 | ||||||
|  |         signal_output = self.get_signal_output(bulk_create_author_with_load) | ||||||
|  | 
 | ||||||
|  |         # The output of this signal is not entirely deterministic. The reloaded | ||||||
|  |         # object will have an object ID. Hence, we only check part of the output | ||||||
|  |         self.assertEquals(signal_output[3], | ||||||
|  |             "pre_bulk_insert signal, [<Author: Bill Shakespeare>]") | ||||||
|  |         self.assertEquals(signal_output[-2:], | ||||||
|  |             ["post_bulk_insert signal, [<Author: Bill Shakespeare>]", | ||||||
|  |              "Is loaded",]) | ||||||
|  | 
 | ||||||
|  |         self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [ | ||||||
|  |             "pre_init signal, Author", | ||||||
|  |             "{'name': 'Bill Shakespeare'}", | ||||||
|  |             "post_init signal, Bill Shakespeare", | ||||||
|  |             "pre_bulk_insert signal, [<Author: Bill Shakespeare>]", | ||||||
|  |             "post_bulk_insert signal, [<Author: Bill Shakespeare>]", | ||||||
|  |             "Not loaded", | ||||||
|  |         ]) | ||||||
|  | 
 | ||||||
|  |         self.Author.objects.delete() | ||||||
		Reference in New Issue
	
	Block a user