Compare commits
	
		
			1 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 3da37fbf6e | 
							
								
								
									
										14
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										14
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -1,17 +1,9 @@ | |||||||
| .* | *.pyc | ||||||
| !.gitignore | .*.swp | ||||||
| *~ |  | ||||||
| *.py[co] |  | ||||||
| .*.sw[po] |  | ||||||
| *.egg | *.egg | ||||||
| docs/.build | docs/.build | ||||||
| docs/_build | docs/_build | ||||||
| build/ | build/ | ||||||
| dist/ | dist/ | ||||||
| mongoengine.egg-info/ | mongoengine.egg-info/ | ||||||
| env/ | env/ | ||||||
| .settings |  | ||||||
| .project |  | ||||||
| .pydevproject |  | ||||||
| tests/test_bugfix.py |  | ||||||
| htmlcov/ |  | ||||||
							
								
								
									
										12
									
								
								.travis.yml
									
									
									
									
									
								
							
							
						
						
									
										12
									
								
								.travis.yml
									
									
									
									
									
								
							| @@ -1,12 +0,0 @@ | |||||||
| # http://travis-ci.org/#!/MongoEngine/mongoengine |  | ||||||
| language: python |  | ||||||
| python: |  | ||||||
|     - 2.6 |  | ||||||
|     - 2.7 |  | ||||||
| install: |  | ||||||
|     - sudo apt-get install zlib1g zlib1g-dev |  | ||||||
|     - sudo ln -s /usr/lib/i386-linux-gnu/libz.so /usr/lib/ |  | ||||||
|     - pip install PIL --use-mirrors ; true |  | ||||||
|     - python setup.py install |  | ||||||
| script: |  | ||||||
|     - python setup.py test |  | ||||||
							
								
								
									
										111
									
								
								AUTHORS
									
									
									
									
									
								
							
							
						
						
									
										111
									
								
								AUTHORS
									
									
									
									
									
								
							| @@ -1,116 +1,5 @@ | |||||||
| The PRIMARY AUTHORS are (and/or have been): |  | ||||||
|  |  | ||||||
| Ross Lawley <ross.lawley@gmail.com> |  | ||||||
| Harry Marr <harry@hmarr.com> | Harry Marr <harry@hmarr.com> | ||||||
| Matt Dennewitz <mattdennewitz@gmail.com> | Matt Dennewitz <mattdennewitz@gmail.com> | ||||||
| Deepak Thukral <iapain@yahoo.com> | Deepak Thukral <iapain@yahoo.com> | ||||||
| Florian Schlachter <flori@n-schlachter.de> | Florian Schlachter <flori@n-schlachter.de> | ||||||
| Steve Challis <steve@stevechallis.com> | Steve Challis <steve@stevechallis.com> | ||||||
| Wilson Júnior <wilsonpjunior@gmail.com> |  | ||||||
| Dan Crosta https://github.com/dcrosta |  | ||||||
|  |  | ||||||
| CONTRIBUTORS |  | ||||||
|  |  | ||||||
| Dervived from the git logs, inevitably incomplete but all of whom and others |  | ||||||
| have submitted patches, reported bugs and generally helped make MongoEngine |  | ||||||
| that much better: |  | ||||||
|  |  | ||||||
|  * Harry Marr |  | ||||||
|  * Ross Lawley |  | ||||||
|  * blackbrrr |  | ||||||
|  * Florian Schlachter |  | ||||||
|  * Vincent Driessen |  | ||||||
|  * Steve Challis |  | ||||||
|  * flosch |  | ||||||
|  * Deepak Thukral |  | ||||||
|  * Colin Howe |  | ||||||
|  * Wilson Júnior |  | ||||||
|  * Alistair Roche |  | ||||||
|  * Dan Crosta |  | ||||||
|  * Viktor Kerkez |  | ||||||
|  * Stephan Jaekel |  | ||||||
|  * Rached Ben Mustapha |  | ||||||
|  * Greg Turner |  | ||||||
|  * Daniel Hasselrot |  | ||||||
|  * Mircea Pasoi |  | ||||||
|  * Matt Chisholm |  | ||||||
|  * James Punteney |  | ||||||
|  * TimothéePeignier |  | ||||||
|  * Stuart Rackham |  | ||||||
|  * Serge Matveenko |  | ||||||
|  * Matt Dennewitz |  | ||||||
|  * Don Spaulding |  | ||||||
|  * Ales Zoulek |  | ||||||
|  * sshwsfc |  | ||||||
|  * sib |  | ||||||
|  * Samuel Clay |  | ||||||
|  * Nick Vlku |  | ||||||
|  * martin |  | ||||||
|  * Flavio Amieiro |  | ||||||
|  * Анхбаяр Лхагвадорж |  | ||||||
|  * Zak Johnson |  | ||||||
|  * Victor Farazdagi |  | ||||||
|  * vandersonmota |  | ||||||
|  * Theo Julienne |  | ||||||
|  * sp |  | ||||||
|  * Slavi Pantaleev |  | ||||||
|  * Richard Henry |  | ||||||
|  * Nicolas Perriault |  | ||||||
|  * Nick Vlku Jr |  | ||||||
|  * Michael Henson |  | ||||||
|  * Leo Honkanen |  | ||||||
|  * kuno |  | ||||||
|  * Josh Ourisman |  | ||||||
|  * Jaime |  | ||||||
|  * Igor Ivanov |  | ||||||
|  * Gregg Lind |  | ||||||
|  * Gareth Lloyd |  | ||||||
|  * Albert Choi |  | ||||||
|  * John Arnfield |  | ||||||
|  * grubberr |  | ||||||
|  * Paul Aliagas |  | ||||||
|  * Paul Cunnane |  | ||||||
|  * Julien Rebetez |  | ||||||
|  * Marc Tamlyn |  | ||||||
|  * Karim Allah |  | ||||||
|  * Adam Parrish |  | ||||||
|  * jpfarias |  | ||||||
|  * jonrscott |  | ||||||
|  * Alice Zoë Bevan-McGregor |  | ||||||
|  * Stephen Young |  | ||||||
|  * tkloc |  | ||||||
|  * aid |  | ||||||
|  * yamaneko1212 |  | ||||||
|  * dave mankoff |  | ||||||
|  * Alexander G. Morano |  | ||||||
|  * jwilder |  | ||||||
|  * Joe Shaw |  | ||||||
|  * Adam Flynn |  | ||||||
|  * Ankhbayar |  | ||||||
|  * Jan Schrewe |  | ||||||
|  * David Koblas |  | ||||||
|  * Crittercism |  | ||||||
|  * Alvin Liang |  | ||||||
|  * andrewmlevy |  | ||||||
|  * Chris Faulkner |  | ||||||
|  * Ashwin Purohit |  | ||||||
|  * Shalabh Aggarwal |  | ||||||
|  * Chris Williams |  | ||||||
|  * Robert Kajic |  | ||||||
|  * Jacob Peddicord |  | ||||||
|  * Nils Hasenbanck |  | ||||||
|  * mostlystatic |  | ||||||
|  * Greg Banks |  | ||||||
|  * swashbuckler |  | ||||||
|  * Adam Reeve |  | ||||||
|  * Anthony Nemitz |  | ||||||
|  * deignacio |  | ||||||
|  * shaunduncan |  | ||||||
|  * Meir Kriheli |  | ||||||
|  * Andrey Fedoseev |  | ||||||
|  * aparajita |  | ||||||
|  * Tristan Escalada |  | ||||||
|  * Alexander Koshelev |  | ||||||
|  * Jaime Irurzun |  | ||||||
|  * Alexandre González |  | ||||||
|  * Thomas Steinacher |  | ||||||
							
								
								
									
										25
									
								
								README.rst
									
									
									
									
									
								
							
							
						
						
									
										25
									
								
								README.rst
									
									
									
									
									
								
							| @@ -3,29 +3,25 @@ MongoEngine | |||||||
| =========== | =========== | ||||||
| :Info: MongoEngine is an ORM-like layer on top of PyMongo. | :Info: MongoEngine is an ORM-like layer on top of PyMongo. | ||||||
| :Author: Harry Marr (http://github.com/hmarr) | :Author: Harry Marr (http://github.com/hmarr) | ||||||
| :Maintainer: Ross Lawley (http://github.com/rozza) |  | ||||||
|  |  | ||||||
| .. image:: https://secure.travis-ci.org/MongoEngine/mongoengine.png?branch=master |  | ||||||
|   :target: http://travis-ci.org/MongoEngine/mongoengine |  | ||||||
|  |  | ||||||
| About | About | ||||||
| ===== | ===== | ||||||
| MongoEngine is a Python Object-Document Mapper for working with MongoDB. | MongoEngine is a Python Object-Document Mapper for working with MongoDB.  | ||||||
| Documentation available at http://mongoengine-odm.rtfd.org - there is currently | Documentation available at http://hmarr.com/mongoengine/ - there is currently  | ||||||
| a `tutorial <http://readthedocs.org/docs/mongoengine-odm/en/latest/tutorial.html>`_, a `user guide | a `tutorial <http://hmarr.com/mongoengine/tutorial.html>`_, a `user guide  | ||||||
| <http://readthedocs.org/docs/mongoengine-odm/en/latest/userguide.html>`_ and an `API reference | <http://hmarr.com/mongoengine/userguide.html>`_ and an `API reference | ||||||
| <http://readthedocs.org/docs/mongoengine-odm/en/latest/apireference.html>`_. | <http://hmarr.com/mongoengine/apireference.html>`_. | ||||||
|  |  | ||||||
| Installation | Installation | ||||||
| ============ | ============ | ||||||
| If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | If you have `setuptools <http://peak.telecommunity.com/DevCenter/setuptools>`_ | ||||||
| you can use ``easy_install -U mongoengine``. Otherwise, you can download the | you can use ``easy_install -U mongoengine``. Otherwise, you can download the | ||||||
| source from `GitHub <http://github.com/MongoEngine/mongoengine>`_ and run ``python | source from `GitHub <http://github.com/hmarr/mongoengine>`_ and run ``python | ||||||
| setup.py install``. | setup.py install``. | ||||||
|  |  | ||||||
| Dependencies | Dependencies | ||||||
| ============ | ============ | ||||||
| - pymongo 2.1.1+ | - pymongo 1.1+ | ||||||
| - sphinx (optional - for documentation generation) | - sphinx (optional - for documentation generation) | ||||||
|  |  | ||||||
| Examples | Examples | ||||||
| @@ -88,15 +84,14 @@ the standard port, and run ``python setup.py test``. | |||||||
|  |  | ||||||
| Community | Community | ||||||
| ========= | ========= | ||||||
| - `MongoEngine Users mailing list | - `MongoEngine Users mailing list  | ||||||
|   <http://groups.google.com/group/mongoengine-users>`_ |   <http://groups.google.com/group/mongoengine-users>`_ | ||||||
| - `MongoEngine Developers mailing list | - `MongoEngine Developers mailing list  | ||||||
|   <http://groups.google.com/group/mongoengine-dev>`_ |   <http://groups.google.com/group/mongoengine-dev>`_ | ||||||
| - `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_ | - `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_ | ||||||
|  |  | ||||||
| Contributing | Contributing | ||||||
| ============ | ============ | ||||||
| The source is available on `GitHub <http://github.com/MongoEngine/mongoengine>`_ - to | The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ - to | ||||||
| contribute to the project, fork it on GitHub and send a pull request, all | contribute to the project, fork it on GitHub and send a pull request, all | ||||||
| contributions and suggestions are welcome! | contributions and suggestions are welcome! | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										182
									
								
								benchmark.py
									
									
									
									
									
								
							
							
						
						
									
										182
									
								
								benchmark.py
									
									
									
									
									
								
							| @@ -1,182 +0,0 @@ | |||||||
| #!/usr/bin/env python |  | ||||||
|  |  | ||||||
| import timeit |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def cprofile_main(): |  | ||||||
|     from pymongo import Connection |  | ||||||
|     connection = Connection() |  | ||||||
|     connection.drop_database('timeit_test') |  | ||||||
|     connection.disconnect() |  | ||||||
|  |  | ||||||
|     from mongoengine import Document, DictField, connect |  | ||||||
|     connect("timeit_test") |  | ||||||
|  |  | ||||||
|     class Noddy(Document): |  | ||||||
|         fields = DictField() |  | ||||||
|  |  | ||||||
|     for i in xrange(1): |  | ||||||
|         noddy = Noddy() |  | ||||||
|         for j in range(20): |  | ||||||
|             noddy.fields["key" + str(j)] = "value " + str(j) |  | ||||||
|         noddy.save() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def main(): |  | ||||||
|     """ |  | ||||||
|     0.4 Performance Figures ... |  | ||||||
|  |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - Pymongo |  | ||||||
|     1.1141769886 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine |  | ||||||
|     2.37724113464 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False |  | ||||||
|     1.92479610443 |  | ||||||
|  |  | ||||||
|     0.5.X |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - Pymongo |  | ||||||
|     1.10552310944 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine |  | ||||||
|     16.5169169903 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False |  | ||||||
|     14.9446101189 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False |  | ||||||
|     14.912801981 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, force=True |  | ||||||
|     14.9617750645 |  | ||||||
|  |  | ||||||
|     Performance |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - Pymongo |  | ||||||
|     1.10072994232 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine |  | ||||||
|     5.27341103554 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False |  | ||||||
|     4.49365401268 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False |  | ||||||
|     4.43459296227 |  | ||||||
|     ---------------------------------------------------------------------------------------------------- |  | ||||||
|     Creating 10000 dictionaries - MongoEngine, force=True |  | ||||||
|     4.40114378929 |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     setup = """ |  | ||||||
| from pymongo import Connection |  | ||||||
| connection = Connection() |  | ||||||
| connection.drop_database('timeit_test') |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| from pymongo import Connection |  | ||||||
| connection = Connection() |  | ||||||
|  |  | ||||||
| db = connection.timeit_test |  | ||||||
| noddy = db.noddy |  | ||||||
|  |  | ||||||
| for i in xrange(10000): |  | ||||||
|     example = {'fields': {}} |  | ||||||
|     for j in range(20): |  | ||||||
|         example['fields']["key"+str(j)] = "value "+str(j) |  | ||||||
|  |  | ||||||
|     noddy.insert(example) |  | ||||||
|  |  | ||||||
| myNoddys = noddy.find() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print "-" * 100 |  | ||||||
|     print """Creating 10000 dictionaries - Pymongo""" |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print t.timeit(1) |  | ||||||
|  |  | ||||||
|     setup = """ |  | ||||||
| from pymongo import Connection |  | ||||||
| connection = Connection() |  | ||||||
| connection.drop_database('timeit_test') |  | ||||||
| connection.disconnect() |  | ||||||
|  |  | ||||||
| from mongoengine import Document, DictField, connect |  | ||||||
| connect("timeit_test") |  | ||||||
|  |  | ||||||
| class Noddy(Document): |  | ||||||
|     fields = DictField() |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in xrange(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save() |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print "-" * 100 |  | ||||||
|     print """Creating 10000 dictionaries - MongoEngine""" |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print t.timeit(1) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in xrange(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(safe=False, validate=False) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print "-" * 100 |  | ||||||
|     print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False""" |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print t.timeit(1) |  | ||||||
|  |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in xrange(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(safe=False, validate=False, cascade=False) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print "-" * 100 |  | ||||||
|     print """Creating 10000 dictionaries - MongoEngine, safe=False, validate=False, cascade=False""" |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print t.timeit(1) |  | ||||||
|  |  | ||||||
|     stmt = """ |  | ||||||
| for i in xrange(10000): |  | ||||||
|     noddy = Noddy() |  | ||||||
|     for j in range(20): |  | ||||||
|         noddy.fields["key"+str(j)] = "value "+str(j) |  | ||||||
|     noddy.save(force_insert=True, safe=False, validate=False, cascade=False) |  | ||||||
|  |  | ||||||
| myNoddys = Noddy.objects() |  | ||||||
| [n for n in myNoddys] # iterate |  | ||||||
| """ |  | ||||||
|  |  | ||||||
|     print "-" * 100 |  | ||||||
|     print """Creating 10000 dictionaries - MongoEngine, force=True""" |  | ||||||
|     t = timeit.Timer(stmt=stmt, setup=setup) |  | ||||||
|     print t.timeit(1) |  | ||||||
|  |  | ||||||
| if __name__ == "__main__": |  | ||||||
|     main() |  | ||||||
| @@ -6,7 +6,6 @@ Connecting | |||||||
| ========== | ========== | ||||||
|  |  | ||||||
| .. autofunction:: mongoengine.connect | .. autofunction:: mongoengine.connect | ||||||
| .. autofunction:: mongoengine.register_connection |  | ||||||
|  |  | ||||||
| Documents | Documents | ||||||
| ========= | ========= | ||||||
| @@ -16,24 +15,15 @@ Documents | |||||||
|  |  | ||||||
|    .. attribute:: objects |    .. attribute:: objects | ||||||
|  |  | ||||||
|       A :class:`~mongoengine.queryset.QuerySet` object that is created lazily |       A :class:`~mongoengine.queryset.QuerySet` object that is created lazily  | ||||||
|       on access. |       on access. | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.EmbeddedDocument | .. autoclass:: mongoengine.EmbeddedDocument | ||||||
|    :members: |    :members: | ||||||
|  |     | ||||||
| .. autoclass:: mongoengine.DynamicDocument |  | ||||||
|    :members: |  | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.DynamicEmbeddedDocument |  | ||||||
|    :members: |  | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.document.MapReduceDocument | .. autoclass:: mongoengine.document.MapReduceDocument | ||||||
|   :members: |   :members: | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.ValidationError |  | ||||||
|   :members: |  | ||||||
|  |  | ||||||
| Querying | Querying | ||||||
| ======== | ======== | ||||||
|  |  | ||||||
| @@ -41,34 +31,40 @@ Querying | |||||||
|    :members: |    :members: | ||||||
|  |  | ||||||
|    .. automethod:: mongoengine.queryset.QuerySet.__call__ |    .. automethod:: mongoengine.queryset.QuerySet.__call__ | ||||||
|  |     | ||||||
| .. autofunction:: mongoengine.queryset.queryset_manager | .. autofunction:: mongoengine.queryset.queryset_manager | ||||||
|  |  | ||||||
| Fields | Fields | ||||||
| ====== | ====== | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.BinaryField |  | ||||||
| .. autoclass:: mongoengine.BooleanField |  | ||||||
| .. autoclass:: mongoengine.ComplexDateTimeField |  | ||||||
| .. autoclass:: mongoengine.DateTimeField |  | ||||||
| .. autoclass:: mongoengine.DecimalField |  | ||||||
| .. autoclass:: mongoengine.DictField |  | ||||||
| .. autoclass:: mongoengine.DynamicField |  | ||||||
| .. autoclass:: mongoengine.EmailField |  | ||||||
| .. autoclass:: mongoengine.EmbeddedDocumentField |  | ||||||
| .. autoclass:: mongoengine.FileField |  | ||||||
| .. autoclass:: mongoengine.FloatField |  | ||||||
| .. autoclass:: mongoengine.GenericEmbeddedDocumentField |  | ||||||
| .. autoclass:: mongoengine.GenericReferenceField |  | ||||||
| .. autoclass:: mongoengine.GeoPointField |  | ||||||
| .. autoclass:: mongoengine.ImageField |  | ||||||
| .. autoclass:: mongoengine.IntField |  | ||||||
| .. autoclass:: mongoengine.ListField |  | ||||||
| .. autoclass:: mongoengine.MapField |  | ||||||
| .. autoclass:: mongoengine.ObjectIdField |  | ||||||
| .. autoclass:: mongoengine.ReferenceField |  | ||||||
| .. autoclass:: mongoengine.SequenceField |  | ||||||
| .. autoclass:: mongoengine.SortedListField |  | ||||||
| .. autoclass:: mongoengine.StringField | .. autoclass:: mongoengine.StringField | ||||||
|  |  | ||||||
| .. autoclass:: mongoengine.URLField | .. autoclass:: mongoengine.URLField | ||||||
| .. autoclass:: mongoengine.UUIDField |  | ||||||
|  | .. autoclass:: mongoengine.IntField | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.FloatField | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.DecimalField | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.BooleanField | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.DateTimeField | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.EmbeddedDocumentField | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.DictField | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.ListField | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.BinaryField | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.ObjectIdField | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.ReferenceField | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.GenericReferenceField | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.FileField | ||||||
|  |  | ||||||
|  | .. autoclass:: mongoengine.GeoPointField | ||||||
|   | |||||||
| @@ -2,259 +2,6 @@ | |||||||
| Changelog | Changelog | ||||||
| ========= | ========= | ||||||
|  |  | ||||||
|  |  | ||||||
| Changes in 0.6.16 |  | ||||||
| ================= |  | ||||||
| - Fixed issue where db_alias wasn't inherited |  | ||||||
|  |  | ||||||
| Changes in 0.6.15 |  | ||||||
| ================= |  | ||||||
| - Updated validation error messages |  | ||||||
| - Added support for null / zero / false values in item_frequencies |  | ||||||
| - Fixed cascade save edge case |  | ||||||
| - Fixed geo index creation through reference fields |  | ||||||
| - Added support for args / kwargs when using @queryset_manager |  | ||||||
| - Deref list custom id fix |  | ||||||
|  |  | ||||||
| Changes in 0.6.14 |  | ||||||
| ================= |  | ||||||
| - Fixed error dict with nested validation |  | ||||||
| - Fixed Int/Float fields and not equals None |  | ||||||
| - Exclude tests from installation |  | ||||||
| - Allow tuples for index meta |  | ||||||
| - Fixed use of str in instance checks |  | ||||||
| - Fixed unicode support in transform update |  | ||||||
| - Added support for add_to_set and each |  | ||||||
|  |  | ||||||
| Changes in 0.6.13 |  | ||||||
| ================ |  | ||||||
| - Fixed EmbeddedDocument db_field validation issue |  | ||||||
| - Fixed StringField unicode issue |  | ||||||
| - Fixes __repr__ modifying the cursor |  | ||||||
|  |  | ||||||
| Changes in 0.6.12 |  | ||||||
| ================= |  | ||||||
| - Fixes scalar lookups for primary_key |  | ||||||
| - Fixes error with _delta handling DBRefs |  | ||||||
|  |  | ||||||
| Changes in 0.6.11 |  | ||||||
| ================== |  | ||||||
| - Fixed inconsistency handling None values field attrs |  | ||||||
| - Fixed map_field embedded db_field issue |  | ||||||
| - Fixed .save() _delta issue with DbRefs |  | ||||||
| - Fixed Django TestCase |  | ||||||
| - Added cmp to Embedded Document |  | ||||||
| - Added PULL reverse_delete_rule |  | ||||||
| - Fixed CASCADE delete bug |  | ||||||
| - Fixed db_field data load error |  | ||||||
| - Fixed recursive save with FileField |  | ||||||
|  |  | ||||||
| Changes in 0.6.10 |  | ||||||
| ================= |  | ||||||
| - Fixed basedict / baselist to return super(..) |  | ||||||
| - Promoted BaseDynamicField to DynamicField |  | ||||||
|  |  | ||||||
| Changes in 0.6.9 |  | ||||||
| ================ |  | ||||||
| - Fixed sparse indexes on inherited docs |  | ||||||
| - Removed FileField auto deletion, needs more work maybe 0.7 |  | ||||||
|  |  | ||||||
| Changes in 0.6.8 |  | ||||||
| ================ |  | ||||||
| - Fixed FileField losing reference when no default set |  | ||||||
| - Removed possible race condition from FileField (grid_file) |  | ||||||
| - Added assignment to save, can now do: b = MyDoc(**kwargs).save() |  | ||||||
| - Added support for pull operations on nested EmbeddedDocuments |  | ||||||
| - Added support for choices with GenericReferenceFields |  | ||||||
| - Added support for choices with GenericEmbeddedDocumentFields |  | ||||||
| - Fixed Django 1.4 sessions first save data loss |  | ||||||
| - FileField now automatically delete files on .delete() |  | ||||||
| - Fix for GenericReference to_mongo method |  | ||||||
| - Fixed connection regression |  | ||||||
| - Updated Django User document, now allows inheritance |  | ||||||
|  |  | ||||||
| Changes in 0.6.7 |  | ||||||
| ================ |  | ||||||
| - Fixed indexing on '_id' or 'pk' or 'id' |  | ||||||
| - Invalid data from the DB now raises a InvalidDocumentError |  | ||||||
| - Cleaned up the Validation Error - docs and code |  | ||||||
| - Added meta `auto_create_index` so you can disable index creation |  | ||||||
| - Added write concern options to inserts |  | ||||||
| - Fixed typo in meta for index options |  | ||||||
| - Bug fix Read preference now passed correctly |  | ||||||
| - Added support for File like objects for GridFS |  | ||||||
| - Fix for #473 - Dereferencing abstracts |  | ||||||
|  |  | ||||||
| Changes in 0.6.6 |  | ||||||
| ================ |  | ||||||
| - Django 1.4 fixed (finally) |  | ||||||
| - Added tests for Django |  | ||||||
|  |  | ||||||
| Changes in 0.6.5 |  | ||||||
| ================ |  | ||||||
| - More Django updates |  | ||||||
|  |  | ||||||
| Changes in 0.6.4 |  | ||||||
| ================ |  | ||||||
|  |  | ||||||
| - Refactored connection / fixed replicasetconnection |  | ||||||
| - Bug fix for unknown connection alias error message |  | ||||||
| - Sessions support Django 1.3 and Django 1.4 |  | ||||||
| - Minor fix for ReferenceField |  | ||||||
|  |  | ||||||
| Changes in 0.6.3 |  | ||||||
| ================ |  | ||||||
| - Updated sessions for Django 1.4 |  | ||||||
| - Bug fix for updates where listfields contain embedded documents |  | ||||||
| - Bug fix for collection naming and mixins |  | ||||||
|  |  | ||||||
| Changes in 0.6.2 |  | ||||||
| ================ |  | ||||||
| - Updated documentation for ReplicaSet connections |  | ||||||
| - Hack round _types issue with SERVER-5247 - querying other arrays may also cause problems. |  | ||||||
|  |  | ||||||
| Changes in 0.6.1 |  | ||||||
| ================ |  | ||||||
| - Fix for replicaSet connections |  | ||||||
|  |  | ||||||
| Changes in 0.6 |  | ||||||
| ================ |  | ||||||
|  |  | ||||||
| - Added FutureWarning to inherited classes not declaring 'allow_inheritance' as the default will change in 0.7 |  | ||||||
| - Added support for covered indexes when inheritance is off |  | ||||||
| - No longer always upsert on save for items with a '_id' |  | ||||||
| - Error raised if update doesn't have an operation |  | ||||||
| - DeReferencing is now thread safe |  | ||||||
| - Errors raised if trying to perform a join in a query |  | ||||||
| - Updates can now take __raw__ queries |  | ||||||
| - Added custom 2D index declarations |  | ||||||
| - Added replicaSet connection support |  | ||||||
| - Updated deprecated imports from pymongo (safe for pymongo 2.2) |  | ||||||
| - Added uri support for connections |  | ||||||
| - Added scalar for efficiently returning partial data values (aliased to values_list) |  | ||||||
| - Fixed limit skip bug |  | ||||||
| - Improved Inheritance / Mixin |  | ||||||
| - Added sharding support |  | ||||||
| - Added pymongo 2.1 support |  | ||||||
| - Fixed Abstract documents can now declare indexes |  | ||||||
| - Added db_alias support to individual documents |  | ||||||
| - Fixed GridFS documents can now be pickled |  | ||||||
| - Added Now raises an InvalidDocumentError when declaring multiple fields with the same db_field |  | ||||||
| - Added InvalidQueryError when calling with_id with a filter |  | ||||||
| - Added support for DBRefs in distinct() |  | ||||||
| - Fixed issue saving False booleans |  | ||||||
| - Fixed issue with dynamic documents deltas |  | ||||||
| - Added Reverse Delete Rule support to ListFields - MapFields aren't supported |  | ||||||
| - Added customisable cascade kwarg options |  | ||||||
| - Fixed Handle None values for non-required fields |  | ||||||
| - Removed Document._get_subclasses() - no longer required |  | ||||||
| - Fixed bug requiring subclasses when not actually needed |  | ||||||
| - Fixed deletion of dynamic data |  | ||||||
| - Added support for the $elementMatch operator |  | ||||||
| - Added reverse option to SortedListFields |  | ||||||
| - Fixed dereferencing - multi directional list dereferencing |  | ||||||
| - Fixed issue creating indexes with recursive embedded documents |  | ||||||
| - Fixed recursive lookup in _unique_with_indexes |  | ||||||
| - Fixed passing ComplexField defaults to constructor for ReferenceFields |  | ||||||
| - Fixed validation of DictField Int keys |  | ||||||
| - Added optional cascade saving |  | ||||||
| - Fixed dereferencing - max_depth now taken into account |  | ||||||
| - Fixed document mutation saving issue |  | ||||||
| - Fixed positional operator when replacing embedded documents |  | ||||||
| - Added Non-Django Style choices back (you can have either) |  | ||||||
| - Fixed __repr__ of a sliced queryset |  | ||||||
| - Added recursive validation error of documents / complex fields |  | ||||||
| - Fixed breaking during queryset iteration |  | ||||||
| - Added pre and post bulk-insert signals |  | ||||||
| - Added ImageField - requires PIL |  | ||||||
| - Fixed Reference Fields can be None in get_or_create / queries |  | ||||||
| - Fixed accessing pk on an embedded document |  | ||||||
| - Fixed calling a queryset after drop_collection now recreates the collection |  | ||||||
| - Add field name to validation exception messages |  | ||||||
| - Added UUID field |  | ||||||
| - Improved efficiency of .get() |  | ||||||
| - Updated ComplexFields so if required they won't accept empty lists / dicts |  | ||||||
| - Added spec file for rpm-based distributions |  | ||||||
| - Fixed ListField so it doesnt accept strings |  | ||||||
| - Added DynamicDocument and EmbeddedDynamicDocument classes for expando schemas |  | ||||||
|  |  | ||||||
| Changes in v0.5.2 |  | ||||||
| ================= |  | ||||||
|  |  | ||||||
| - A Robust Circular reference bugfix |  | ||||||
|  |  | ||||||
|  |  | ||||||
| Changes in v0.5.1 |  | ||||||
| ================= |  | ||||||
|  |  | ||||||
| - Fixed simple circular reference bug |  | ||||||
|  |  | ||||||
| Changes in v0.5 |  | ||||||
| =============== |  | ||||||
|  |  | ||||||
| - Added InvalidDocumentError - so Document core methods can't be overwritten |  | ||||||
| - Added GenericEmbeddedDocument - so you can embed any type of embeddable document |  | ||||||
| - Added within_polygon support - for those with mongodb 1.9 |  | ||||||
| - Updated sum / average to use map_reduce as db.eval doesn't work in sharded environments |  | ||||||
| - Added where() - filter to allowing users to specify query expressions as Javascript |  | ||||||
| - Added SequenceField - for creating sequential counters |  | ||||||
| - Added update() convenience method to a document |  | ||||||
| - Added cascading saves - so changes to Referenced documents are saved on .save() |  | ||||||
| - Added select_related() support |  | ||||||
| - Added support for the positional operator |  | ||||||
| - Updated geo index checking to be recursive and check in embedded documents |  | ||||||
| - Updated default collection naming convention |  | ||||||
| - Added Document Mixin support |  | ||||||
| - Fixed queryset __repr__ mid iteration |  | ||||||
| - Added hint() support, so can tell Mongo the proper index to use for the query |  | ||||||
| - Fixed issue with inconsistent setting of _cls breaking inherited referencing |  | ||||||
| - Added help_text and verbose_name to fields to help with some form libs |  | ||||||
| - Updated item_frequencies to handle embedded document lookups |  | ||||||
| - Added delta tracking now only sets / unsets explicitly changed fields |  | ||||||
| - Fixed saving so sets updated values rather than overwrites |  | ||||||
| - Added ComplexDateTimeField - Handles datetimes correctly with microseconds |  | ||||||
| - Added ComplexBaseField - for improved flexibility and performance |  | ||||||
| - Added get_FIELD_display() method for easy choice field displaying |  | ||||||
| - Added queryset.slave_okay(enabled) method |  | ||||||
| - Updated queryset.timeout(enabled) and queryset.snapshot(enabled) to be chainable |  | ||||||
| - Added insert method for bulk inserts |  | ||||||
| - Added blinker signal support |  | ||||||
| - Added query_counter context manager for tests |  | ||||||
| - Added map_reduce method item_frequencies and set as default (as db.eval doesn't work in sharded environments) |  | ||||||
| - Added inline_map_reduce option to map_reduce |  | ||||||
| - Updated connection exception so it provides more info on the cause. |  | ||||||
| - Added searching multiple levels deep in ``DictField`` |  | ||||||
| - Added ``DictField`` entries containing strings to use matching operators |  | ||||||
| - Added ``MapField``, similar to ``DictField`` |  | ||||||
| - Added Abstract Base Classes |  | ||||||
| - Added Custom Objects Managers |  | ||||||
| - Added sliced subfields updating |  | ||||||
| - Added ``NotRegistered`` exception if dereferencing ``Document`` not in the registry |  | ||||||
| - Added a write concern for ``save``, ``update``, ``update_one`` and ``get_or_create`` |  | ||||||
| - Added slicing / subarray fetching controls |  | ||||||
| - Fixed various unique index and other index issues |  | ||||||
| - Fixed threaded connection issues |  | ||||||
| - Added spherical geospatial query operators |  | ||||||
| - Updated queryset to handle latest version of pymongo |  | ||||||
|   map_reduce now requires an output. |  | ||||||
| - Added ``Document`` __hash__, __ne__ for pickling |  | ||||||
| - Added ``FileField`` optional size arg for read method |  | ||||||
| - Fixed ``FileField`` seek and tell methods for reading files |  | ||||||
| - Added ``QuerySet.clone`` to support copying querysets |  | ||||||
| - Fixed item_frequencies when using a name that's the same as a native js function |  | ||||||
| - Added reverse delete rules |  | ||||||
| - Fixed issue with unset operation |  | ||||||
| - Fixed Q-object bug |  | ||||||
| - Added ``QuerySet.all_fields`` resets previous .only() and .exclude() |  | ||||||
| - Added ``QuerySet.exclude`` |  | ||||||
| - Added django style choices |  | ||||||
| - Fixed order and filter issue |  | ||||||
| - Added ``QuerySet.only`` subfield support |  | ||||||
| - Added creation_counter to ``BaseField`` allowing fields to be sorted in the |  | ||||||
|   way the user has specified them |  | ||||||
| - Fixed various errors |  | ||||||
| - Added many tests |  | ||||||
|  |  | ||||||
| Changes in v0.4 | Changes in v0.4 | ||||||
| =============== | =============== | ||||||
| - Added ``GridFSStorage`` Django storage backend | - Added ``GridFSStorage`` Django storage backend | ||||||
| @@ -285,7 +32,7 @@ Changes in v0.3 | |||||||
| =============== | =============== | ||||||
| - Added MapReduce support | - Added MapReduce support | ||||||
| - Added ``contains``, ``startswith`` and ``endswith`` query operators (and | - Added ``contains``, ``startswith`` and ``endswith`` query operators (and | ||||||
|   case-insensitive versions that are prefixed with 'i') |   case-insensitive versions that are prefixed with 'i')  | ||||||
| - Deprecated fields' ``name`` parameter, replaced with ``db_field`` | - Deprecated fields' ``name`` parameter, replaced with ``db_field`` | ||||||
| - Added ``QuerySet.only`` for only retrieving specific fields | - Added ``QuerySet.only`` for only retrieving specific fields | ||||||
| - Added ``QuerySet.in_bulk()`` for bulk querying using ids | - Added ``QuerySet.in_bulk()`` for bulk querying using ids | ||||||
| @@ -332,7 +79,7 @@ Changes in v0.2 | |||||||
| =============== | =============== | ||||||
| - Added ``Q`` class for building advanced queries | - Added ``Q`` class for building advanced queries | ||||||
| - Added ``QuerySet`` methods for atomic updates to documents | - Added ``QuerySet`` methods for atomic updates to documents | ||||||
| - Fields may now specify ``unique=True`` to enforce uniqueness across a | - Fields may now specify ``unique=True`` to enforce uniqueness across a  | ||||||
|   collection |   collection | ||||||
| - Added option for default document ordering | - Added option for default document ordering | ||||||
| - Fixed bug in index definitions | - Fixed bug in index definitions | ||||||
| @@ -340,7 +87,7 @@ Changes in v0.2 | |||||||
| Changes in v0.1.3 | Changes in v0.1.3 | ||||||
| ================= | ================= | ||||||
| - Added Django authentication backend | - Added Django authentication backend | ||||||
| - Added ``Document.meta`` support for indexes, which are ensured just before | - Added ``Document.meta`` support for indexes, which are ensured just before  | ||||||
|   querying takes place |   querying takes place | ||||||
| - A few minor bugfixes | - A few minor bugfixes | ||||||
|  |  | ||||||
|   | |||||||
| @@ -38,7 +38,7 @@ master_doc = 'index' | |||||||
|  |  | ||||||
| # General information about the project. | # General information about the project. | ||||||
| project = u'MongoEngine' | project = u'MongoEngine' | ||||||
| copyright = u'2009-2012, MongoEngine Authors' | copyright = u'2009-2010, Harry Marr' | ||||||
|  |  | ||||||
| # The version info for the project you're documenting, acts as replacement for | # The version info for the project you're documenting, acts as replacement for | ||||||
| # |version| and |release|, also used in various other places throughout the | # |version| and |release|, also used in various other places throughout the | ||||||
| @@ -121,7 +121,7 @@ html_theme_path = ['_themes'] | |||||||
| # Add any paths that contain custom static files (such as style sheets) here, | # Add any paths that contain custom static files (such as style sheets) here, | ||||||
| # relative to this directory. They are copied after the builtin static files, | # relative to this directory. They are copied after the builtin static files, | ||||||
| # so a file named "default.css" will overwrite the builtin "default.css". | # so a file named "default.css" will overwrite the builtin "default.css". | ||||||
| #html_static_path = ['_static'] | html_static_path = ['_static'] | ||||||
|  |  | ||||||
| # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, | ||||||
| # using the given strftime format. | # using the given strftime format. | ||||||
|   | |||||||
| @@ -2,21 +2,19 @@ | |||||||
| Using MongoEngine with Django | Using MongoEngine with Django | ||||||
| ============================= | ============================= | ||||||
|  |  | ||||||
| .. note :: Updated to support Django 1.4 |  | ||||||
|  |  | ||||||
| Connecting | Connecting | ||||||
| ========== | ========== | ||||||
| In your **settings.py** file, ignore the standard database settings (unless you | In your **settings.py** file, ignore the standard database settings (unless you | ||||||
| also plan to use the ORM in your project), and instead call | also plan to use the ORM in your project), and instead call  | ||||||
| :func:`~mongoengine.connect` somewhere in the settings module. | :func:`~mongoengine.connect` somewhere in the settings module. | ||||||
|  |  | ||||||
| Authentication | Authentication | ||||||
| ============== | ============== | ||||||
| MongoEngine includes a Django authentication backend, which uses MongoDB. The | MongoEngine includes a Django authentication backend, which uses MongoDB. The | ||||||
| :class:`~mongoengine.django.auth.User` model is a MongoEngine | :class:`~mongoengine.django.auth.User` model is a MongoEngine  | ||||||
| :class:`~mongoengine.Document`, but implements most of the methods and | :class:`~mongoengine.Document`, but implements most of the methods and  | ||||||
| attributes that the standard Django :class:`User` model does - so the two are | attributes that the standard Django :class:`User` model does - so the two are | ||||||
| moderately compatible. Using this backend will allow you to store users in | moderately compatible. Using this backend will allow you to store users in  | ||||||
| MongoDB but still use many of the Django authentication infrastructure (such as | MongoDB but still use many of the Django authentication infrastructure (such as | ||||||
| the :func:`login_required` decorator and the :func:`authenticate` function). To | the :func:`login_required` decorator and the :func:`authenticate` function). To | ||||||
| enable the MongoEngine auth backend, add the following to your **settings.py** | enable the MongoEngine auth backend, add the following to your **settings.py** | ||||||
| @@ -26,7 +24,7 @@ file:: | |||||||
|         'mongoengine.django.auth.MongoEngineBackend', |         'mongoengine.django.auth.MongoEngineBackend', | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
| The :mod:`~mongoengine.django.auth` module also contains a | The :mod:`~mongoengine.django.auth` module also contains a  | ||||||
| :func:`~mongoengine.django.auth.get_user` helper function, that takes a user's | :func:`~mongoengine.django.auth.get_user` helper function, that takes a user's | ||||||
| :attr:`id` and returns a :class:`~mongoengine.django.auth.User` object. | :attr:`id` and returns a :class:`~mongoengine.django.auth.User` object. | ||||||
|  |  | ||||||
| @@ -51,11 +49,10 @@ Storage | |||||||
| ======= | ======= | ||||||
| With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`, | With MongoEngine's support for GridFS via the :class:`~mongoengine.FileField`, | ||||||
| it is useful to have a Django file storage backend that wraps this. The new | it is useful to have a Django file storage backend that wraps this. The new | ||||||
| storage module is called :class:`~mongoengine.django.storage.GridFSStorage`. | storage module is called :class:`~mongoengine.django.GridFSStorage`. Using it | ||||||
| Using it is very similar to using the default FileSystemStorage.:: | is very similar to using the default FileSystemStorage.:: | ||||||
|  |  | ||||||
|     from mongoengine.django.storage import GridFSStorage |     fs = mongoengine.django.GridFSStorage() | ||||||
|     fs = GridFSStorage() |  | ||||||
|  |  | ||||||
|     filename = fs.save('hello.txt', 'Hello, World!') |     filename = fs.save('hello.txt', 'Hello, World!') | ||||||
|  |  | ||||||
|   | |||||||
| @@ -3,7 +3,6 @@ | |||||||
| ===================== | ===================== | ||||||
| Connecting to MongoDB | Connecting to MongoDB | ||||||
| ===================== | ===================== | ||||||
|  |  | ||||||
| To connect to a running instance of :program:`mongod`, use the | To connect to a running instance of :program:`mongod`, use the | ||||||
| :func:`~mongoengine.connect` function. The first argument is the name of the | :func:`~mongoengine.connect` function. The first argument is the name of the | ||||||
| database to connect to. If the database does not exist, it will be created. If | database to connect to. If the database does not exist, it will be created. If | ||||||
| @@ -19,47 +18,3 @@ provide :attr:`host` and :attr:`port` arguments to | |||||||
| :func:`~mongoengine.connect`:: | :func:`~mongoengine.connect`:: | ||||||
|  |  | ||||||
|     connect('project1', host='192.168.1.35', port=12345) |     connect('project1', host='192.168.1.35', port=12345) | ||||||
|  |  | ||||||
| Uri style connections are also supported as long as you include the database |  | ||||||
| name - just supply the uri as the :attr:`host` to |  | ||||||
| :func:`~mongoengine.connect`:: |  | ||||||
|  |  | ||||||
|     connect('project1', host='mongodb://localhost/database_name') |  | ||||||
|  |  | ||||||
| ReplicaSets |  | ||||||
| =========== |  | ||||||
|  |  | ||||||
| MongoEngine now supports :func:`~pymongo.replica_set_connection.ReplicaSetConnection` |  | ||||||
| to use them please use a URI style connection and provide the `replicaSet` name in the |  | ||||||
| connection kwargs. |  | ||||||
|  |  | ||||||
| Multiple Databases |  | ||||||
| ================== |  | ||||||
|  |  | ||||||
| Multiple database support was added in MongoEngine 0.6. To use multiple |  | ||||||
| databases you can use :func:`~mongoengine.connect` and provide an `alias` name |  | ||||||
| for the connection - if no `alias` is provided then "default" is used. |  | ||||||
|  |  | ||||||
| In the background this uses :func:`~mongoengine.register_connection` to |  | ||||||
| store the data and you can register all aliases up front if required. |  | ||||||
|  |  | ||||||
| Individual documents can also support multiple databases by providing a |  | ||||||
| `db_alias` in their meta data.  This allows :class:`~pymongo.dbref.DBRef` objects |  | ||||||
| to point across databases and collections.  Below is an example schema, using |  | ||||||
| 3 different databases to store data:: |  | ||||||
|  |  | ||||||
|         class User(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|             meta = {"db_alias": "user-db"} |  | ||||||
|  |  | ||||||
|         class Book(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|             meta = {"db_alias": "book-db"} |  | ||||||
|  |  | ||||||
|         class AuthorBooks(Document): |  | ||||||
|             author = ReferenceField(User) |  | ||||||
|             book = ReferenceField(Book) |  | ||||||
|  |  | ||||||
|             meta = {"db_alias": "users-books-db"} |  | ||||||
|   | |||||||
| @@ -4,14 +4,14 @@ Defining documents | |||||||
| In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When | In MongoDB, a **document** is roughly equivalent to a **row** in an RDBMS. When | ||||||
| working with relational databases, rows are stored in **tables**, which have a | working with relational databases, rows are stored in **tables**, which have a | ||||||
| strict **schema** that the rows follow. MongoDB stores documents in | strict **schema** that the rows follow. MongoDB stores documents in | ||||||
| **collections** rather than tables - the principal difference is that no schema | **collections** rather than tables - the principal difference is that no schema  | ||||||
| is enforced at a database level. | is enforced at a database level.  | ||||||
|  |  | ||||||
| Defining a document's schema | Defining a document's schema | ||||||
| ============================ | ============================ | ||||||
| MongoEngine allows you to define schemata for documents as this helps to reduce | MongoEngine allows you to define schemata for documents as this helps to reduce | ||||||
| coding errors, and allows for utility methods to be defined on fields which may | coding errors, and allows for utility methods to be defined on fields which may | ||||||
| be present. | be present.  | ||||||
|  |  | ||||||
| To define a schema for a document, create a class that inherits from | To define a schema for a document, create a class that inherits from | ||||||
| :class:`~mongoengine.Document`. Fields are specified by adding **field | :class:`~mongoengine.Document`. Fields are specified by adding **field | ||||||
| @@ -19,39 +19,11 @@ objects** as class attributes to the document class:: | |||||||
|  |  | ||||||
|     from mongoengine import * |     from mongoengine import * | ||||||
|     import datetime |     import datetime | ||||||
|  |      | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         title = StringField(max_length=200, required=True) |         title = StringField(max_length=200, required=True) | ||||||
|         date_modified = DateTimeField(default=datetime.datetime.now) |         date_modified = DateTimeField(default=datetime.datetime.now) | ||||||
|  |  | ||||||
| Dynamic document schemas |  | ||||||
| ======================== |  | ||||||
| One of the benefits of MongoDB is dynamic schemas for a collection, whilst data |  | ||||||
| should be planned and organised (after all explicit is better than implicit!) |  | ||||||
| there are scenarios where having dynamic / expando style documents is desirable. |  | ||||||
|  |  | ||||||
| :class:`~mongoengine.DynamicDocument` documents work in the same way as |  | ||||||
| :class:`~mongoengine.Document` but any data / attributes set to them will also |  | ||||||
| be saved :: |  | ||||||
|  |  | ||||||
|     from mongoengine import * |  | ||||||
|  |  | ||||||
|     class Page(DynamicDocument): |  | ||||||
|         title = StringField(max_length=200, required=True) |  | ||||||
|  |  | ||||||
|     # Create a new page and add tags |  | ||||||
|     >>> page = Page(title='Using MongoEngine') |  | ||||||
|     >>> page.tags = ['mongodb', 'mongoengine'] |  | ||||||
|     >>> page.save() |  | ||||||
|  |  | ||||||
|     >>> Page.objects(tags='mongoengine').count() |  | ||||||
|     >>> 1 |  | ||||||
|  |  | ||||||
| .. note:: |  | ||||||
|  |  | ||||||
|    There is one caveat on Dynamic Documents: fields cannot start with `_` |  | ||||||
|  |  | ||||||
|  |  | ||||||
| Fields | Fields | ||||||
| ====== | ====== | ||||||
| By default, fields are not required. To make a field mandatory, set the | By default, fields are not required. To make a field mandatory, set the | ||||||
| @@ -59,38 +31,31 @@ By default, fields are not required. To make a field mandatory, set the | |||||||
| validation constraints available (such as :attr:`max_length` in the example | validation constraints available (such as :attr:`max_length` in the example | ||||||
| above). Fields may also take default values, which will be used if a value is | above). Fields may also take default values, which will be used if a value is | ||||||
| not provided. Default values may optionally be a callable, which will be called | not provided. Default values may optionally be a callable, which will be called | ||||||
| to retrieve the value (such as in the above example). The field types available | to retrieve the value (such as in the above example). The field types available  | ||||||
| are as follows: | are as follows: | ||||||
|  |  | ||||||
| * :class:`~mongoengine.BinaryField` |  | ||||||
| * :class:`~mongoengine.BooleanField` |  | ||||||
| * :class:`~mongoengine.ComplexDateTimeField` |  | ||||||
| * :class:`~mongoengine.DateTimeField` |  | ||||||
| * :class:`~mongoengine.DecimalField` |  | ||||||
| * :class:`~mongoengine.DictField` |  | ||||||
| * :class:`~mongoengine.DynamicField` |  | ||||||
| * :class:`~mongoengine.EmailField` |  | ||||||
| * :class:`~mongoengine.EmbeddedDocumentField` |  | ||||||
| * :class:`~mongoengine.FileField` |  | ||||||
| * :class:`~mongoengine.FloatField` |  | ||||||
| * :class:`~mongoengine.GenericEmbeddedDocumentField` |  | ||||||
| * :class:`~mongoengine.GenericReferenceField` |  | ||||||
| * :class:`~mongoengine.GeoPointField` |  | ||||||
| * :class:`~mongoengine.ImageField` |  | ||||||
| * :class:`~mongoengine.IntField` |  | ||||||
| * :class:`~mongoengine.ListField` |  | ||||||
| * :class:`~mongoengine.MapField` |  | ||||||
| * :class:`~mongoengine.ObjectIdField` |  | ||||||
| * :class:`~mongoengine.ReferenceField` |  | ||||||
| * :class:`~mongoengine.SequenceField` |  | ||||||
| * :class:`~mongoengine.SortedListField` |  | ||||||
| * :class:`~mongoengine.StringField` | * :class:`~mongoengine.StringField` | ||||||
| * :class:`~mongoengine.URLField` | * :class:`~mongoengine.URLField` | ||||||
| * :class:`~mongoengine.UUIDField` | * :class:`~mongoengine.IntField` | ||||||
|  | * :class:`~mongoengine.FloatField` | ||||||
|  | * :class:`~mongoengine.DecimalField` | ||||||
|  | * :class:`~mongoengine.DateTimeField` | ||||||
|  | * :class:`~mongoengine.ListField` | ||||||
|  | * :class:`~mongoengine.DictField` | ||||||
|  | * :class:`~mongoengine.ObjectIdField` | ||||||
|  | * :class:`~mongoengine.EmbeddedDocumentField` | ||||||
|  | * :class:`~mongoengine.ReferenceField` | ||||||
|  | * :class:`~mongoengine.GenericReferenceField` | ||||||
|  | * :class:`~mongoengine.BooleanField` | ||||||
|  | * :class:`~mongoengine.FileField` | ||||||
|  | * :class:`~mongoengine.EmailField` | ||||||
|  | * :class:`~mongoengine.SortedListField` | ||||||
|  | * :class:`~mongoengine.BinaryField` | ||||||
|  | * :class:`~mongoengine.GeoPointField` | ||||||
|  |  | ||||||
| Field arguments | Field arguments | ||||||
| --------------- | --------------- | ||||||
| Each field type can be customized by keyword arguments.  The following keyword | Each field type can be customized by keyword arguments.  The following keyword  | ||||||
| arguments can be set on all fields: | arguments can be set on all fields: | ||||||
|  |  | ||||||
| :attr:`db_field` (Default: None) | :attr:`db_field` (Default: None) | ||||||
| @@ -101,7 +66,7 @@ arguments can be set on all fields: | |||||||
|  |  | ||||||
| :attr:`required` (Default: False) | :attr:`required` (Default: False) | ||||||
|     If set to True and the field is not set on the document instance, a |     If set to True and the field is not set on the document instance, a | ||||||
|     :class:`~mongoengine.ValidationError` will be raised when the document is |     :class:`~mongoengine.base.ValidationError` will be raised when the document is | ||||||
|     validated. |     validated. | ||||||
|  |  | ||||||
| :attr:`default` (Default: None) | :attr:`default` (Default: None) | ||||||
| @@ -109,7 +74,7 @@ arguments can be set on all fields: | |||||||
|  |  | ||||||
|     The definition of default parameters follows `the general rules on Python |     The definition of default parameters follows `the general rules on Python | ||||||
|     <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__, |     <http://docs.python.org/reference/compound_stmts.html#function-definitions>`__, | ||||||
|     which means that some care should be taken when dealing with default mutable objects |     which means that some care should be taken when dealing with default mutable objects  | ||||||
|     (like in :class:`~mongoengine.ListField` or :class:`~mongoengine.DictField`):: |     (like in :class:`~mongoengine.ListField` or :class:`~mongoengine.DictField`):: | ||||||
|  |  | ||||||
|         class ExampleFirst(Document): |         class ExampleFirst(Document): | ||||||
| @@ -124,7 +89,7 @@ arguments can be set on all fields: | |||||||
|             # This can make an .append call to  add values to the default (and all the following objects), |             # This can make an .append call to  add values to the default (and all the following objects), | ||||||
|             # instead to just an object |             # instead to just an object | ||||||
|             values = ListField(IntField(), default=[1,2,3]) |             values = ListField(IntField(), default=[1,2,3]) | ||||||
|  |      | ||||||
|  |  | ||||||
| :attr:`unique` (Default: False) | :attr:`unique` (Default: False) | ||||||
|     When True, no documents in the collection will have the same value for this |     When True, no documents in the collection will have the same value for this | ||||||
| @@ -138,35 +103,8 @@ arguments can be set on all fields: | |||||||
|     When True, use this field as a primary key for the collection. |     When True, use this field as a primary key for the collection. | ||||||
|  |  | ||||||
| :attr:`choices` (Default: None) | :attr:`choices` (Default: None) | ||||||
|     An iterable (e.g. a list or tuple) of choices to which the value of this |     An iterable of choices to which the value of this field should be limited. | ||||||
|     field should be limited. |      | ||||||
|  |  | ||||||
|     Can be either be a nested tuples of value (stored in mongo) and a |  | ||||||
|     human readable key :: |  | ||||||
|  |  | ||||||
|         SIZE = (('S', 'Small'), |  | ||||||
|                 ('M', 'Medium'), |  | ||||||
|                 ('L', 'Large'), |  | ||||||
|                 ('XL', 'Extra Large'), |  | ||||||
|                 ('XXL', 'Extra Extra Large')) |  | ||||||
|  |  | ||||||
|  |  | ||||||
|         class Shirt(Document): |  | ||||||
|             size = StringField(max_length=3, choices=SIZE) |  | ||||||
|  |  | ||||||
|     Or a flat iterable just containing values :: |  | ||||||
|  |  | ||||||
|         SIZE = ('S', 'M', 'L', 'XL', 'XXL') |  | ||||||
|  |  | ||||||
|         class Shirt(Document): |  | ||||||
|             size = StringField(max_length=3, choices=SIZE) |  | ||||||
|  |  | ||||||
| :attr:`help_text` (Default: None) |  | ||||||
|     Optional help text to output with the field - used by form libraries |  | ||||||
|  |  | ||||||
| :attr:`verbose_name` (Default: None) |  | ||||||
|     Optional human-readable name for the field - used by form libraries |  | ||||||
|  |  | ||||||
|  |  | ||||||
| List fields | List fields | ||||||
| ----------- | ----------- | ||||||
| @@ -183,7 +121,7 @@ Embedded documents | |||||||
| MongoDB has the ability to embed documents within other documents. Schemata may | MongoDB has the ability to embed documents within other documents. Schemata may | ||||||
| be defined for these embedded documents, just as they may be for regular | be defined for these embedded documents, just as they may be for regular | ||||||
| documents. To create an embedded document, just define a document as usual, but | documents. To create an embedded document, just define a document as usual, but | ||||||
| inherit from :class:`~mongoengine.EmbeddedDocument` rather than | inherit from :class:`~mongoengine.EmbeddedDocument` rather than  | ||||||
| :class:`~mongoengine.Document`:: | :class:`~mongoengine.Document`:: | ||||||
|  |  | ||||||
|     class Comment(EmbeddedDocument): |     class Comment(EmbeddedDocument): | ||||||
| @@ -196,8 +134,8 @@ document class as the first argument:: | |||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         comments = ListField(EmbeddedDocumentField(Comment)) |         comments = ListField(EmbeddedDocumentField(Comment)) | ||||||
|  |  | ||||||
|     comment1 = Comment(content='Good work!') |     comment1 = Comment('Good work!') | ||||||
|     comment2 = Comment(content='Nice article!') |     comment2 = Comment('Nice article!') | ||||||
|     page = Page(comments=[comment1, comment2]) |     page = Page(comments=[comment1, comment2]) | ||||||
|  |  | ||||||
| Dictionary Fields | Dictionary Fields | ||||||
| @@ -206,7 +144,7 @@ Often, an embedded document may be used instead of a dictionary -- generally | |||||||
| this is recommended as dictionaries don't support validation or custom field | this is recommended as dictionaries don't support validation or custom field | ||||||
| types. However, sometimes you will not know the structure of what you want to | types. However, sometimes you will not know the structure of what you want to | ||||||
| store; in this situation a :class:`~mongoengine.DictField` is appropriate:: | store; in this situation a :class:`~mongoengine.DictField` is appropriate:: | ||||||
|  |      | ||||||
|     class SurveyResponse(Document): |     class SurveyResponse(Document): | ||||||
|         date = DateTimeField() |         date = DateTimeField() | ||||||
|         user = ReferenceField(User) |         user = ReferenceField(User) | ||||||
| @@ -214,19 +152,16 @@ store; in this situation a :class:`~mongoengine.DictField` is appropriate:: | |||||||
|  |  | ||||||
|     survey_response = SurveyResponse(date=datetime.now(), user=request.user) |     survey_response = SurveyResponse(date=datetime.now(), user=request.user) | ||||||
|     response_form = ResponseForm(request.POST) |     response_form = ResponseForm(request.POST) | ||||||
|     survey_response.answers = response_form.cleaned_data() |     survey_response.answers = response_form.cleaned_data()    | ||||||
|     survey_response.save() |     survey_response.save() | ||||||
|  |  | ||||||
| Dictionaries can store complex data, other dictionaries, lists, references to |  | ||||||
| other objects, so are the most flexible field type available. |  | ||||||
|  |  | ||||||
| Reference fields | Reference fields | ||||||
| ---------------- | ---------------- | ||||||
| References may be stored to other documents in the database using the | References may be stored to other documents in the database using the | ||||||
| :class:`~mongoengine.ReferenceField`. Pass in another document class as the | :class:`~mongoengine.ReferenceField`. Pass in another document class as the | ||||||
| first argument to the constructor, then simply assign document objects to the | first argument to the constructor, then simply assign document objects to the | ||||||
| field:: | field:: | ||||||
|  |      | ||||||
|     class User(Document): |     class User(Document): | ||||||
|         name = StringField() |         name = StringField() | ||||||
|  |  | ||||||
| @@ -258,76 +193,19 @@ as the constructor's argument:: | |||||||
|     class ProfilePage(Document): |     class ProfilePage(Document): | ||||||
|         content = StringField() |         content = StringField() | ||||||
|  |  | ||||||
|  |  | ||||||
| Dealing with deletion of referred documents |  | ||||||
| ''''''''''''''''''''''''''''''''''''''''''' |  | ||||||
| By default, MongoDB doesn't check the integrity of your data, so deleting |  | ||||||
| documents that other documents still hold references to will lead to consistency |  | ||||||
| issues.  Mongoengine's :class:`ReferenceField` adds some functionality to |  | ||||||
| safeguard against these kinds of database integrity problems, providing each |  | ||||||
| reference with a delete rule specification.  A delete rule is specified by |  | ||||||
| supplying the :attr:`reverse_delete_rule` attributes on the |  | ||||||
| :class:`ReferenceField` definition, like this:: |  | ||||||
|  |  | ||||||
|     class Employee(Document): |  | ||||||
|         ... |  | ||||||
|         profile_page = ReferenceField('ProfilePage', reverse_delete_rule=mongoengine.NULLIFY) |  | ||||||
|  |  | ||||||
| The declaration in this example means that when an :class:`Employee` object is |  | ||||||
| removed, the :class:`ProfilePage` that belongs to that employee is removed as |  | ||||||
| well.  If a whole batch of employees is removed, all profile pages that are |  | ||||||
| linked are removed as well. |  | ||||||
|  |  | ||||||
| Its value can take any of the following constants: |  | ||||||
|  |  | ||||||
| :const:`mongoengine.DO_NOTHING` |  | ||||||
|   This is the default and won't do anything.  Deletes are fast, but may cause |  | ||||||
|   database inconsistency or dangling references. |  | ||||||
| :const:`mongoengine.DENY` |  | ||||||
|   Deletion is denied if there still exist references to the object being |  | ||||||
|   deleted. |  | ||||||
| :const:`mongoengine.NULLIFY` |  | ||||||
|   Any object's fields still referring to the object being deleted are removed |  | ||||||
|   (using MongoDB's "unset" operation), effectively nullifying the relationship. |  | ||||||
| :const:`mongoengine.CASCADE` |  | ||||||
|   Any object containing fields that are referring to the object being deleted |  | ||||||
|   are deleted first. |  | ||||||
| :const:`mongoengine.PULL` |  | ||||||
|   Removes the reference to the object (using MongoDB's "pull" operation) |  | ||||||
|   from any object's fields of |  | ||||||
|   :class:`~mongoengine.ListField` (:class:`~mongoengine.ReferenceField`). |  | ||||||
|  |  | ||||||
|  |  | ||||||
| .. warning:: |  | ||||||
|    A safety note on setting up these delete rules!  Since the delete rules are |  | ||||||
|    not recorded on the database level by MongoDB itself, but instead at runtime, |  | ||||||
|    in-memory, by the MongoEngine module, it is of the utmost importance |  | ||||||
|    that the module that declares the relationship is loaded **BEFORE** the |  | ||||||
|    delete is invoked. |  | ||||||
|  |  | ||||||
|    If, for example, the :class:`Employee` object lives in the |  | ||||||
|    :mod:`payroll` app, and the :class:`ProfilePage` in the :mod:`people` |  | ||||||
|    app, it is extremely important that the :mod:`people` app is loaded |  | ||||||
|    before any employee is removed, because otherwise, MongoEngine could |  | ||||||
|    never know this relationship exists. |  | ||||||
|  |  | ||||||
|    In Django, be sure to put all apps that have such delete rule declarations in |  | ||||||
|    their :file:`models.py` in the :const:`INSTALLED_APPS` tuple. |  | ||||||
|  |  | ||||||
|  |  | ||||||
| Generic reference fields | Generic reference fields | ||||||
| '''''''''''''''''''''''' | '''''''''''''''''''''''' | ||||||
| A second kind of reference field also exists, | A second kind of reference field also exists, | ||||||
| :class:`~mongoengine.GenericReferenceField`. This allows you to reference any | :class:`~mongoengine.GenericReferenceField`. This allows you to reference any | ||||||
| kind of :class:`~mongoengine.Document`, and hence doesn't take a | kind of :class:`~mongoengine.Document`, and hence doesn't take a  | ||||||
| :class:`~mongoengine.Document` subclass as a constructor argument:: | :class:`~mongoengine.Document` subclass as a constructor argument:: | ||||||
|  |  | ||||||
|     class Link(Document): |     class Link(Document): | ||||||
|         url = StringField() |         url = StringField() | ||||||
|  |          | ||||||
|     class Post(Document): |     class Post(Document): | ||||||
|         title = StringField() |         title = StringField() | ||||||
|  |          | ||||||
|     class Bookmark(Document): |     class Bookmark(Document): | ||||||
|         bookmark_object = GenericReferenceField() |         bookmark_object = GenericReferenceField() | ||||||
|  |  | ||||||
| @@ -341,10 +219,9 @@ kind of :class:`~mongoengine.Document`, and hence doesn't take a | |||||||
|     Bookmark(bookmark_object=post).save() |     Bookmark(bookmark_object=post).save() | ||||||
|  |  | ||||||
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|    Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less |    Using :class:`~mongoengine.GenericReferenceField`\ s is slightly less | ||||||
|    efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if |    efficient than the standard :class:`~mongoengine.ReferenceField`\ s, so if | ||||||
|    you will only be referencing one document type, prefer the standard |    you will only be referencing one document type, prefer the standard  | ||||||
|    :class:`~mongoengine.ReferenceField`. |    :class:`~mongoengine.ReferenceField`. | ||||||
|  |  | ||||||
| Uniqueness constraints | Uniqueness constraints | ||||||
| @@ -352,7 +229,7 @@ Uniqueness constraints | |||||||
| MongoEngine allows you to specify that a field should be unique across a | MongoEngine allows you to specify that a field should be unique across a | ||||||
| collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's | collection by providing ``unique=True`` to a :class:`~mongoengine.Field`\ 's | ||||||
| constructor. If you try to save a document that has the same value for a unique | constructor. If you try to save a document that has the same value for a unique | ||||||
| field as a document that is already in the database, a | field as a document that is already in the database, a  | ||||||
| :class:`~mongoengine.OperationError` will be raised. You may also specify | :class:`~mongoengine.OperationError` will be raised. You may also specify | ||||||
| multi-field uniqueness constraints by using :attr:`unique_with`, which may be | multi-field uniqueness constraints by using :attr:`unique_with`, which may be | ||||||
| either a single field name, or a list or tuple of field names:: | either a single field name, or a list or tuple of field names:: | ||||||
| @@ -364,14 +241,14 @@ either a single field name, or a list or tuple of field names:: | |||||||
|  |  | ||||||
| Skipping Document validation on save | Skipping Document validation on save | ||||||
| ------------------------------------ | ------------------------------------ | ||||||
| You can also skip the whole document validation process by setting | You can also skip the whole document validation process by setting  | ||||||
| ``validate=False`` when calling the :meth:`~mongoengine.document.Document.save` | ``validate=False`` when calling the :meth:`~mongoengine.document.Document.save`  | ||||||
| method:: | method:: | ||||||
|  |  | ||||||
|     class Recipient(Document): |     class Recipient(Document): | ||||||
|         name = StringField() |         name = StringField() | ||||||
|         email = EmailField() |         email = EmailField() | ||||||
|  |      | ||||||
|     recipient = Recipient(name='admin', email='root@localhost') |     recipient = Recipient(name='admin', email='root@localhost') | ||||||
|     recipient.save()               # will raise a ValidationError while |     recipient.save()               # will raise a ValidationError while | ||||||
|     recipient.save(validate=False) # won't |     recipient.save(validate=False) # won't | ||||||
| @@ -399,7 +276,7 @@ A :class:`~mongoengine.Document` may use a **Capped Collection** by specifying | |||||||
| stored in the collection, and :attr:`max_size` is the maximum size of the | stored in the collection, and :attr:`max_size` is the maximum size of the | ||||||
| collection in bytes. If :attr:`max_size` is not specified and | collection in bytes. If :attr:`max_size` is not specified and | ||||||
| :attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB). | :attr:`max_documents` is, :attr:`max_size` defaults to 10000000 bytes (10MB). | ||||||
| The following example shows a :class:`Log` document that will be limited to | The following example shows a :class:`Log` document that will be limited to  | ||||||
| 1000 entries and 2MB of disk space:: | 1000 entries and 2MB of disk space:: | ||||||
|  |  | ||||||
|     class Log(Document): |     class Log(Document): | ||||||
| @@ -411,10 +288,9 @@ Indexes | |||||||
| You can specify indexes on collections to make querying faster. This is done | You can specify indexes on collections to make querying faster. This is done | ||||||
| by creating a list of index specifications called :attr:`indexes` in the | by creating a list of index specifications called :attr:`indexes` in the | ||||||
| :attr:`~mongoengine.Document.meta` dictionary, where an index specification may | :attr:`~mongoengine.Document.meta` dictionary, where an index specification may | ||||||
| either be a single field name, a tuple containing multiple field names, or a | either be a single field name, or a tuple containing multiple field names. A | ||||||
| dictionary containing a full index definition. A direction may be specified on | direction may be specified on fields by prefixing the field name with a **+** | ||||||
| fields by prefixing the field name with a **+** or a **-** sign. Note that | or a **-** sign. Note that direction only matters on multi-field indexes. :: | ||||||
| direction only matters on multi-field indexes. :: |  | ||||||
|  |  | ||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         title = StringField() |         title = StringField() | ||||||
| @@ -423,47 +299,10 @@ direction only matters on multi-field indexes. :: | |||||||
|             'indexes': ['title', ('title', '-rating')] |             'indexes': ['title', ('title', '-rating')] | ||||||
|         } |         } | ||||||
|  |  | ||||||
| If a dictionary is passed then the following options are available: | .. note:: | ||||||
|  |    Geospatial indexes will be automatically created for all  | ||||||
| :attr:`fields` (Default: None) |    :class:`~mongoengine.GeoPointField`\ s | ||||||
|     The fields to index. Specified in the same format as described above. |          | ||||||
|  |  | ||||||
| :attr:`types` (Default: True) |  | ||||||
|     Whether the index should have the :attr:`_types` field added automatically |  | ||||||
|     to the start of the index. |  | ||||||
|  |  | ||||||
| :attr:`sparse` (Default: False) |  | ||||||
|     Whether the index should be sparse. |  | ||||||
|  |  | ||||||
| :attr:`unique` (Default: False) |  | ||||||
|     Whether the index should be unique. |  | ||||||
|  |  | ||||||
| .. warning:: |  | ||||||
|  |  | ||||||
|  |  | ||||||
|    Inheritance adds extra indices. |  | ||||||
|    If you don't need inheritance for a document, turn inheritance off - see :ref:`document-inheritance`. |  | ||||||
|  |  | ||||||
|  |  | ||||||
| Geospatial indexes |  | ||||||
| --------------------------- |  | ||||||
| Geospatial indexes will be automatically created for all |  | ||||||
| :class:`~mongoengine.GeoPointField`\ s |  | ||||||
|  |  | ||||||
| It is also possible to explicitly define geospatial indexes. This is |  | ||||||
| useful if you need to define a geospatial index on a subfield of a |  | ||||||
| :class:`~mongoengine.DictField` or a custom field that contains a |  | ||||||
| point. To create a geospatial index you must prefix the field with the |  | ||||||
| ***** sign. :: |  | ||||||
|  |  | ||||||
|     class Place(Document): |  | ||||||
|         location = DictField() |  | ||||||
|         meta = { |  | ||||||
|             'indexes': [ |  | ||||||
|                 '*location.point', |  | ||||||
|             ], |  | ||||||
|         } |  | ||||||
|  |  | ||||||
| Ordering | Ordering | ||||||
| ======== | ======== | ||||||
| A default ordering can be specified for your | A default ordering can be specified for your | ||||||
| @@ -485,7 +324,7 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. :: | |||||||
|     blog_post_1 = BlogPost(title="Blog Post #1") |     blog_post_1 = BlogPost(title="Blog Post #1") | ||||||
|     blog_post_1.published_date = datetime(2010, 1, 5, 0, 0 ,0) |     blog_post_1.published_date = datetime(2010, 1, 5, 0, 0 ,0) | ||||||
|  |  | ||||||
|     blog_post_2 = BlogPost(title="Blog Post #2") |     blog_post_2 = BlogPost(title="Blog Post #2")  | ||||||
|     blog_post_2.published_date = datetime(2010, 1, 6, 0, 0 ,0) |     blog_post_2.published_date = datetime(2010, 1, 6, 0, 0 ,0) | ||||||
|  |  | ||||||
|     blog_post_3 = BlogPost(title="Blog Post #3") |     blog_post_3 = BlogPost(title="Blog Post #3") | ||||||
| @@ -497,38 +336,15 @@ subsequent calls to :meth:`~mongoengine.queryset.QuerySet.order_by`. :: | |||||||
|  |  | ||||||
|     # get the "first" BlogPost using default ordering |     # get the "first" BlogPost using default ordering | ||||||
|     # from BlogPost.meta.ordering |     # from BlogPost.meta.ordering | ||||||
|     latest_post = BlogPost.objects.first() |     latest_post = BlogPost.objects.first()  | ||||||
|     assert latest_post.title == "Blog Post #3" |     assert latest_post.title == "Blog Post #3" | ||||||
|  |  | ||||||
|     # override default ordering, order BlogPosts by "published_date" |     # override default ordering, order BlogPosts by "published_date" | ||||||
|     first_post = BlogPost.objects.order_by("+published_date").first() |     first_post = BlogPost.objects.order_by("+published_date").first() | ||||||
|     assert first_post.title == "Blog Post #1" |     assert first_post.title == "Blog Post #1" | ||||||
|  |  | ||||||
| Shard keys |  | ||||||
| ========== |  | ||||||
|  |  | ||||||
| If your collection is sharded, then you need to specify the shard key as a tuple, |  | ||||||
| using the :attr:`shard_key` attribute of :attr:`~mongoengine.Document.meta`. |  | ||||||
| This ensures that the shard key is sent with the query when calling the |  | ||||||
| :meth:`~mongoengine.document.Document.save` or |  | ||||||
| :meth:`~mongoengine.document.Document.update` method on an existing |  | ||||||
| :class:`~mongoengine.Document` instance:: |  | ||||||
|  |  | ||||||
|     class LogEntry(Document): |  | ||||||
|         machine = StringField() |  | ||||||
|         app = StringField() |  | ||||||
|         timestamp = DateTimeField() |  | ||||||
|         data = StringField() |  | ||||||
|  |  | ||||||
|         meta = { |  | ||||||
|             'shard_key': ('machine', 'timestamp',) |  | ||||||
|         } |  | ||||||
|  |  | ||||||
| .. _document-inheritance: |  | ||||||
|  |  | ||||||
| Document inheritance | Document inheritance | ||||||
| ==================== | ==================== | ||||||
|  |  | ||||||
| To create a specialised type of a :class:`~mongoengine.Document` you have | To create a specialised type of a :class:`~mongoengine.Document` you have | ||||||
| defined, you may subclass it and add any extra fields or methods you may need. | defined, you may subclass it and add any extra fields or methods you may need. | ||||||
| As this new class is not a direct subclass of | As this new class is not a direct subclass of | ||||||
| @@ -540,21 +356,16 @@ convenient and efficient retrieval of related documents:: | |||||||
|     class Page(Document): |     class Page(Document): | ||||||
|         title = StringField(max_length=200, required=True) |         title = StringField(max_length=200, required=True) | ||||||
|  |  | ||||||
|         meta = {'allow_inheritance': True} |  | ||||||
|  |  | ||||||
|     # Also stored in the collection named 'page' |     # Also stored in the collection named 'page' | ||||||
|     class DatedPage(Page): |     class DatedPage(Page): | ||||||
|         date = DateTimeField() |         date = DateTimeField() | ||||||
|  |  | ||||||
| .. note:: From 0.7 onwards you must declare `allow_inheritance` in the document meta. |  | ||||||
|  |  | ||||||
|  |  | ||||||
| Working with existing data | Working with existing data | ||||||
| -------------------------- | -------------------------- | ||||||
| To enable correct retrieval of documents involved in this kind of hierarchy, | To enable correct retrieval of documents involved in this kind of hierarchy, | ||||||
| two extra attributes are stored on each document in the database: :attr:`_cls` | two extra attributes are stored on each document in the database: :attr:`_cls` | ||||||
| and :attr:`_types`. These are hidden from the user through the MongoEngine | and :attr:`_types`. These are hidden from the user through the MongoEngine | ||||||
| interface, but may not be present if you are trying to use MongoEngine with | interface, but may not be present if you are trying to use MongoEngine with  | ||||||
| an existing database. For this reason, you may disable this inheritance | an existing database. For this reason, you may disable this inheritance | ||||||
| mechanism, removing the dependency of :attr:`_cls` and :attr:`_types`, enabling | mechanism, removing the dependency of :attr:`_cls` and :attr:`_types`, enabling | ||||||
| you to work with existing databases. To disable inheritance on a document | you to work with existing databases. To disable inheritance on a document | ||||||
|   | |||||||
| @@ -4,12 +4,12 @@ Documents instances | |||||||
| To create a new document object, create an instance of the relevant document | To create a new document object, create an instance of the relevant document | ||||||
| class, providing values for its fields as its constructor keyword arguments. | class, providing values for its fields as its constructor keyword arguments. | ||||||
| You may provide values for any of the fields on the document:: | You may provide values for any of the fields on the document:: | ||||||
|  |      | ||||||
|     >>> page = Page(title="Test Page") |     >>> page = Page(title="Test Page") | ||||||
|     >>> page.title |     >>> page.title | ||||||
|     'Test Page' |     'Test Page' | ||||||
|  |  | ||||||
| You may also assign values to the document's fields using standard object | You may also assign values to the document's fields using standard object  | ||||||
| attribute syntax:: | attribute syntax:: | ||||||
|  |  | ||||||
|     >>> page.title = "Example Page" |     >>> page.title = "Example Page" | ||||||
| @@ -18,40 +18,18 @@ attribute syntax:: | |||||||
|  |  | ||||||
| Saving and deleting documents | Saving and deleting documents | ||||||
| ============================= | ============================= | ||||||
| MongoEngine tracks changes to documents to provide efficient saving.  To save | To save the document to the database, call the | ||||||
| the document to the database, call the :meth:`~mongoengine.Document.save` method. | :meth:`~mongoengine.Document.save` method. If the document does not exist in | ||||||
| If the document does not exist in the database, it will be created. If it does | the database, it will be created. If it does already exist, it will be | ||||||
| already exist, then any changes will be updated atomically.  For example:: | updated. | ||||||
|  |  | ||||||
|     >>> page = Page(title="Test Page") | To delete a document, call the :meth:`~mongoengine.Document.delete` method. | ||||||
|     >>> page.save()  # Performs an insert | Note that this will only work if the document exists in the database and has a | ||||||
|     >>> page.title = "My Page" | valide :attr:`id`. | ||||||
|     >>> page.save()  # Performs an atomic set on the title field. |  | ||||||
|  |  | ||||||
| .. note:: |  | ||||||
|  |  | ||||||
|     Changes to documents are tracked and on the whole perform `set` operations. |  | ||||||
|  |  | ||||||
|     * ``list_field.pop(0)`` - *sets* the resulting list |  | ||||||
|     * ``del(list_field)``   - *unsets* whole list |  | ||||||
|  |  | ||||||
| .. seealso:: | .. seealso:: | ||||||
|     :ref:`guide-atomic-updates` |     :ref:`guide-atomic-updates` | ||||||
|  |  | ||||||
| Cascading Saves |  | ||||||
| --------------- |  | ||||||
| If your document contains :class:`~mongoengine.ReferenceField` or |  | ||||||
| :class:`~mongoengine.GenericReferenceField` objects, then by default the |  | ||||||
| :meth:`~mongoengine.Document.save` method will automatically save any changes to |  | ||||||
| those objects as well.  If this is not desired passing :attr:`cascade` as False |  | ||||||
| to the save method turns this feature off. |  | ||||||
|  |  | ||||||
| Deleting documents |  | ||||||
| ------------------ |  | ||||||
| To delete a document, call the :meth:`~mongoengine.Document.delete` method. |  | ||||||
| Note that this will only work if the document exists in the database and has a |  | ||||||
| valid :attr:`id`. |  | ||||||
|  |  | ||||||
| Document IDs | Document IDs | ||||||
| ============ | ============ | ||||||
| Each document in the database has a unique id. This may be accessed through the | Each document in the database has a unique id. This may be accessed through the | ||||||
| @@ -89,7 +67,6 @@ is an alias to :attr:`id`:: | |||||||
|     >>> page.id == page.pk |     >>> page.id == page.pk | ||||||
|  |  | ||||||
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|    If you define your own primary key field, the field implicitly becomes |    If you define your own primary key field, the field implicitly becomes | ||||||
|    required, so a :class:`~mongoengine.ValidationError` will be thrown if |    required, so a :class:`ValidationError` will be thrown if you don't provide | ||||||
|    you don't provide it. |    it. | ||||||
|   | |||||||
| @@ -65,8 +65,7 @@ Deleting stored files is achieved with the :func:`delete` method:: | |||||||
|  |  | ||||||
|     marmot.photo.delete() |     marmot.photo.delete() | ||||||
|  |  | ||||||
| .. warning:: | .. note:: | ||||||
|  |  | ||||||
|     The FileField in a Document actually only stores the ID of a file in a |     The FileField in a Document actually only stores the ID of a file in a | ||||||
|     separate GridFS collection. This means that deleting a document |     separate GridFS collection. This means that deleting a document | ||||||
|     with a defined FileField does not actually delete the file. You must be |     with a defined FileField does not actually delete the file. You must be | ||||||
|   | |||||||
| @@ -11,4 +11,3 @@ User Guide | |||||||
|    document-instances |    document-instances | ||||||
|    querying |    querying | ||||||
|    gridfs |    gridfs | ||||||
|    signals |  | ||||||
|   | |||||||
| @@ -1,31 +1,31 @@ | |||||||
| ====================== | ====================== | ||||||
| Installing MongoEngine | Installing MongoEngine | ||||||
| ====================== | ====================== | ||||||
|  |  | ||||||
| To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_ | To use MongoEngine, you will need to download `MongoDB <http://mongodb.org/>`_ | ||||||
| and ensure it is running in an accessible location. You will also need | and ensure it is running in an accessible location. You will also need | ||||||
| `PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you | `PyMongo <http://api.mongodb.org/python>`_ to use MongoEngine, but if you | ||||||
| install MongoEngine using setuptools, then the dependencies will be handled for | install MongoEngine using setuptools, then the dependencies will be handled for | ||||||
| you. | you. | ||||||
|  |  | ||||||
| MongoEngine is available on PyPI, so to use it you can use :program:`pip`: | MongoEngine is available on PyPI, so to use it you can use  | ||||||
|  | :program:`easy_install`: | ||||||
|  |      | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     $ pip install mongoengine |     # easy_install mongoengine | ||||||
|  |  | ||||||
| Alternatively, if you don't have setuptools installed, `download it from PyPi | Alternatively, if you don't have setuptools installed, `download it from PyPi  | ||||||
| <http://pypi.python.org/pypi/mongoengine/>`_ and run | <http://pypi.python.org/pypi/mongoengine/>`_ and run | ||||||
|  |  | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     $ python setup.py install |     # python setup.py install | ||||||
|  |  | ||||||
| To use the bleeding-edge version of MongoEngine, you can get the source from | To use the bleeding-edge version of MongoEngine, you can get the source from | ||||||
| `GitHub <http://github.com/hmarr/mongoengine/>`_ and install it as above: | `GitHub <http://github.com/hmarr/mongoengine/>`_ and install it as above: | ||||||
|  |      | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     $ git clone git://github.com/hmarr/mongoengine |     # git clone git://github.com/hmarr/mongoengine | ||||||
|     $ cd mongoengine |     # cd mongoengine | ||||||
|     $ python setup.py install |     # python setup.py install | ||||||
|   | |||||||
| @@ -5,8 +5,8 @@ Querying the database | |||||||
| is used for accessing the objects in the database associated with the class. | is used for accessing the objects in the database associated with the class. | ||||||
| The :attr:`objects` attribute is actually a | The :attr:`objects` attribute is actually a | ||||||
| :class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new | :class:`~mongoengine.queryset.QuerySetManager`, which creates and returns a new | ||||||
| :class:`~mongoengine.queryset.QuerySet` object on access. The | a new :class:`~mongoengine.queryset.QuerySet` object on access. The | ||||||
| :class:`~mongoengine.queryset.QuerySet` object may be iterated over to | :class:`~mongoengine.queryset.QuerySet` object may may be iterated over to | ||||||
| fetch documents from the database:: | fetch documents from the database:: | ||||||
|  |  | ||||||
|     # Prints out the names of all the users in the database |     # Prints out the names of all the users in the database | ||||||
| @@ -14,7 +14,6 @@ fetch documents from the database:: | |||||||
|         print user.name |         print user.name | ||||||
|  |  | ||||||
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|    Once the iteration finishes (when :class:`StopIteration` is raised), |    Once the iteration finishes (when :class:`StopIteration` is raised), | ||||||
|    :meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the |    :meth:`~mongoengine.queryset.QuerySet.rewind` will be called so that the | ||||||
|    :class:`~mongoengine.queryset.QuerySet` may be iterated over again. The |    :class:`~mongoengine.queryset.QuerySet` may be iterated over again. The | ||||||
| @@ -24,7 +23,7 @@ fetch documents from the database:: | |||||||
| Filtering queries | Filtering queries | ||||||
| ================= | ================= | ||||||
| The query may be filtered by calling the | The query may be filtered by calling the | ||||||
| :class:`~mongoengine.queryset.QuerySet` object with field lookup keyword | :class:`~mongoengine.queryset.QuerySet` object with field lookup keyword  | ||||||
| arguments. The keys in the keyword arguments correspond to fields on the | arguments. The keys in the keyword arguments correspond to fields on the | ||||||
| :class:`~mongoengine.Document` you are querying:: | :class:`~mongoengine.Document` you are querying:: | ||||||
|  |  | ||||||
| @@ -40,6 +39,29 @@ syntax:: | |||||||
|     # been written by a user whose 'country' field is set to 'uk' |     # been written by a user whose 'country' field is set to 'uk' | ||||||
|     uk_pages = Page.objects(author__country='uk') |     uk_pages = Page.objects(author__country='uk') | ||||||
|  |  | ||||||
|  | Querying lists | ||||||
|  | -------------- | ||||||
|  | On most fields, this syntax will look up documents where the field specified | ||||||
|  | matches the given value exactly, but when the field refers to a | ||||||
|  | :class:`~mongoengine.ListField`, a single item may be provided, in which case | ||||||
|  | lists that contain that item will be matched:: | ||||||
|  |  | ||||||
|  |     class Page(Document): | ||||||
|  |         tags = ListField(StringField()) | ||||||
|  |  | ||||||
|  |     # This will match all pages that have the word 'coding' as an item in the | ||||||
|  |     # 'tags' list | ||||||
|  |     Page.objects(tags='coding') | ||||||
|  |  | ||||||
|  | Raw queries | ||||||
|  | ----------- | ||||||
|  | It is possible to provide a raw PyMongo query as a query parameter, which will | ||||||
|  | be integrated directly into the query. This is done using the ``__raw__`` | ||||||
|  | keyword argument:: | ||||||
|  |  | ||||||
|  |     Page.objects(__raw__={'tags': 'coding'}) | ||||||
|  |  | ||||||
|  | .. versionadded:: 0.4 | ||||||
|  |  | ||||||
| Query operators | Query operators | ||||||
| =============== | =============== | ||||||
| @@ -62,7 +84,7 @@ Available operators are as follows: | |||||||
| * ``nin`` -- value is not in list (a list of values should be provided) | * ``nin`` -- value is not in list (a list of values should be provided) | ||||||
| * ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values | * ``mod`` -- ``value % x == y``, where ``x`` and ``y`` are two provided values | ||||||
| * ``all`` -- every item in list of values provided is in array | * ``all`` -- every item in list of values provided is in array | ||||||
| * ``size`` -- the size of the array is | * ``size`` -- the size of the array is  | ||||||
| * ``exists`` -- value for field exists | * ``exists`` -- value for field exists | ||||||
|  |  | ||||||
| The following operators are available as shortcuts to querying with regular | The following operators are available as shortcuts to querying with regular | ||||||
| @@ -76,69 +98,27 @@ expressions: | |||||||
| * ``istartswith`` -- string field starts with value (case insensitive) | * ``istartswith`` -- string field starts with value (case insensitive) | ||||||
| * ``endswith`` -- string field ends with value | * ``endswith`` -- string field ends with value | ||||||
| * ``iendswith`` -- string field ends with value (case insensitive) | * ``iendswith`` -- string field ends with value (case insensitive) | ||||||
| * ``match``  -- performs an $elemMatch so you can match an entire document within an array |  | ||||||
|  | .. versionadded:: 0.3 | ||||||
|  |  | ||||||
| There are a few special operators for performing geographical queries, that | There are a few special operators for performing geographical queries, that | ||||||
| may be used with :class:`~mongoengine.GeoPointField`\ s: | may be used with :class:`~mongoengine.GeoPointField`\ s: | ||||||
|  |  | ||||||
| * ``within_distance`` -- provide a list containing a point and a maximum | * ``within_distance`` -- provide a list containing a point and a maximum | ||||||
|   distance (e.g. [(41.342, -87.653), 5]) |   distance (e.g. [(41.342, -87.653), 5]) | ||||||
| * ``within_spherical_distance`` -- Same as above but using the spherical geo model |  | ||||||
|   (e.g. [(41.342, -87.653), 5/earth_radius]) |  | ||||||
| * ``near`` -- order the documents by how close they are to a given point |  | ||||||
| * ``near_sphere`` -- Same as above but using the spherical geo model |  | ||||||
| * ``within_box`` -- filter documents to those within a given bounding box (e.g. | * ``within_box`` -- filter documents to those within a given bounding box (e.g. | ||||||
|   [(35.0, -125.0), (40.0, -100.0)]) |   [(35.0, -125.0), (40.0, -100.0)]) | ||||||
| * ``within_polygon`` -- filter documents to those within a given polygon (e.g. | * ``near`` -- order the documents by how close they are to a given point | ||||||
|   [(41.91,-87.69), (41.92,-87.68), (41.91,-87.65), (41.89,-87.65)]). |  | ||||||
|   .. note:: Requires Mongo Server 2.0 |  | ||||||
|  |  | ||||||
|  | .. versionadded:: 0.4 | ||||||
|  |  | ||||||
| Querying lists | Querying by position | ||||||
| -------------- | ==================== | ||||||
| On most fields, this syntax will look up documents where the field specified |  | ||||||
| matches the given value exactly, but when the field refers to a |  | ||||||
| :class:`~mongoengine.ListField`, a single item may be provided, in which case |  | ||||||
| lists that contain that item will be matched:: |  | ||||||
|  |  | ||||||
|     class Page(Document): |  | ||||||
|         tags = ListField(StringField()) |  | ||||||
|  |  | ||||||
|     # This will match all pages that have the word 'coding' as an item in the |  | ||||||
|     # 'tags' list |  | ||||||
|     Page.objects(tags='coding') |  | ||||||
|  |  | ||||||
| It is possible to query by position in a list by using a numerical value as a | It is possible to query by position in a list by using a numerical value as a | ||||||
| query operator. So if you wanted to find all pages whose first tag was ``db``, | query operator. So if you wanted to find all pages whose first tag was ``db``, | ||||||
| you could use the following query:: | you could use the following query:: | ||||||
|  |  | ||||||
|     Page.objects(tags__0='db') |     BlogPost.objects(tags__0='db') | ||||||
|  |  | ||||||
| If you only want to fetch part of a list eg: you want to paginate a list, then |  | ||||||
| the `slice` operator is required:: |  | ||||||
|  |  | ||||||
|     # comments - skip 5, limit 10 |  | ||||||
|     Page.objects.fields(slice__comments=[5, 10]) |  | ||||||
|  |  | ||||||
| For updating documents, if you don't know the position in a list, you can use |  | ||||||
| the $ positional operator :: |  | ||||||
|  |  | ||||||
|     Post.objects(comments__by="joe").update(**{'inc__comments__$__votes': 1}) |  | ||||||
|  |  | ||||||
| However, this doesn't map well to the syntax so you can also use a capital S instead :: |  | ||||||
|  |  | ||||||
|     Post.objects(comments__by="joe").update(inc__comments__S__votes=1) |  | ||||||
|  |  | ||||||
|     .. note:: Due to Mongo currently the $ operator only applies to the first matched item in the query. |  | ||||||
|  |  | ||||||
|  |  | ||||||
| Raw queries |  | ||||||
| ----------- |  | ||||||
| It is possible to provide a raw PyMongo query as a query parameter, which will |  | ||||||
| be integrated directly into the query. This is done using the ``__raw__`` |  | ||||||
| keyword argument:: |  | ||||||
|  |  | ||||||
|     Page.objects(__raw__={'tags': 'coding'}) |  | ||||||
|  |  | ||||||
| .. versionadded:: 0.4 | .. versionadded:: 0.4 | ||||||
|  |  | ||||||
| @@ -183,9 +163,9 @@ To retrieve a result that should be unique in the collection, use | |||||||
| and :class:`~mongoengine.queryset.MultipleObjectsReturned` if more than one | and :class:`~mongoengine.queryset.MultipleObjectsReturned` if more than one | ||||||
| document matched the query. | document matched the query. | ||||||
|  |  | ||||||
| A variation of this method exists, | A variation of this method exists,  | ||||||
| :meth:`~mongoengine.queryset.Queryset.get_or_create`, that will create a new | :meth:`~mongoengine.queryset.Queryset.get_or_create`, that will create a new | ||||||
| document with the query arguments if no documents match the query. An | document with the query arguments if no documents match the query. An  | ||||||
| additional keyword argument, :attr:`defaults` may be provided, which will be | additional keyword argument, :attr:`defaults` may be provided, which will be | ||||||
| used as default values for the new document, in the case that it should need | used as default values for the new document, in the case that it should need | ||||||
| to be created:: | to be created:: | ||||||
| @@ -260,7 +240,7 @@ Javascript code that is executed on the database server. | |||||||
| Counting results | Counting results | ||||||
| ---------------- | ---------------- | ||||||
| Just as with limiting and skipping results, there is a method on | Just as with limiting and skipping results, there is a method on | ||||||
| :class:`~mongoengine.queryset.QuerySet` objects -- | :class:`~mongoengine.queryset.QuerySet` objects --  | ||||||
| :meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic | :meth:`~mongoengine.queryset.QuerySet.count`, but there is also a more Pythonic | ||||||
| way of achieving this:: | way of achieving this:: | ||||||
|  |  | ||||||
| @@ -274,7 +254,6 @@ You may sum over the values of a specific field on documents using | |||||||
|     yearly_expense = Employee.objects.sum('salary') |     yearly_expense = Employee.objects.sum('salary') | ||||||
|  |  | ||||||
| .. note:: | .. note:: | ||||||
|  |  | ||||||
|    If the field isn't present on a document, that document will be ignored from |    If the field isn't present on a document, that document will be ignored from | ||||||
|    the sum. |    the sum. | ||||||
|  |  | ||||||
| @@ -297,16 +276,8 @@ would be generating "tag-clouds":: | |||||||
|     from operator import itemgetter |     from operator import itemgetter | ||||||
|     top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] |     top_tags = sorted(tag_freqs.items(), key=itemgetter(1), reverse=True)[:10] | ||||||
|  |  | ||||||
|  |  | ||||||
| Query efficiency and performance |  | ||||||
| ================================ |  | ||||||
|  |  | ||||||
| There are a couple of methods to improve efficiency when querying, reducing the |  | ||||||
| information returned by the query or efficient dereferencing . |  | ||||||
|  |  | ||||||
| Retrieving a subset of fields | Retrieving a subset of fields | ||||||
| ----------------------------- | ============================= | ||||||
|  |  | ||||||
| Sometimes a subset of fields on a :class:`~mongoengine.Document` is required, | Sometimes a subset of fields on a :class:`~mongoengine.Document` is required, | ||||||
| and for efficiency only these should be retrieved from the database. This issue | and for efficiency only these should be retrieved from the database. This issue | ||||||
| is especially important for MongoDB, as fields may often be extremely large | is especially important for MongoDB, as fields may often be extremely large | ||||||
| @@ -331,44 +302,18 @@ will be given:: | |||||||
|     >>> f.rating # default value |     >>> f.rating # default value | ||||||
|     3 |     3 | ||||||
|  |  | ||||||
| .. note:: |  | ||||||
|  |  | ||||||
|     The :meth:`~mongoengine.queryset.QuerySet.exclude` is the opposite of |  | ||||||
|     :meth:`~mongoengine.queryset.QuerySet.only` if you want to exclude a field. |  | ||||||
|  |  | ||||||
| If you later need the missing fields, just call | If you later need the missing fields, just call | ||||||
| :meth:`~mongoengine.Document.reload` on your document. | :meth:`~mongoengine.Document.reload` on your document. | ||||||
|  |  | ||||||
| Getting related data |  | ||||||
| -------------------- |  | ||||||
|  |  | ||||||
| When iterating the results of :class:`~mongoengine.ListField` or |  | ||||||
| :class:`~mongoengine.DictField` we automatically dereference any |  | ||||||
| :class:`~pymongo.dbref.DBRef` objects as efficiently as possible, reducing the |  | ||||||
| number the queries to mongo. |  | ||||||
|  |  | ||||||
| There are times when that efficiency is not enough, documents that have |  | ||||||
| :class:`~mongoengine.ReferenceField` objects or |  | ||||||
| :class:`~mongoengine.GenericReferenceField` objects at the top level are |  | ||||||
| expensive as the number of queries to MongoDB can quickly rise. |  | ||||||
|  |  | ||||||
| To limit the number of queries use |  | ||||||
| :func:`~mongoengine.queryset.QuerySet.select_related` which converts the |  | ||||||
| QuerySet to a list and dereferences as efficiently as possible.  By default |  | ||||||
| :func:`~mongoengine.queryset.QuerySet.select_related` only dereferences any |  | ||||||
| references to the depth of 1 level.  If you have more complicated documents and |  | ||||||
| want to dereference more of the object at once then increasing the :attr:`max_depth` |  | ||||||
| will dereference more levels of the document. |  | ||||||
|  |  | ||||||
| Advanced queries | Advanced queries | ||||||
| ================ | ================ | ||||||
| Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword | Sometimes calling a :class:`~mongoengine.queryset.QuerySet` object with keyword | ||||||
| arguments can't fully express the query you want to use -- for example if you | arguments can't fully express the query you want to use -- for example if you | ||||||
| need to combine a number of constraints using *and* and *or*. This is made | need to combine a number of constraints using *and* and *or*. This is made  | ||||||
| possible in MongoEngine through the :class:`~mongoengine.queryset.Q` class. | possible in MongoEngine through the :class:`~mongoengine.queryset.Q` class. | ||||||
| A :class:`~mongoengine.queryset.Q` object represents part of a query, and | A :class:`~mongoengine.queryset.Q` object represents part of a query, and | ||||||
| can be initialised using the same keyword-argument syntax you use to query | can be initialised using the same keyword-argument syntax you use to query | ||||||
| documents. To build a complex query, you may combine | documents. To build a complex query, you may combine  | ||||||
| :class:`~mongoengine.queryset.Q` objects using the ``&`` (and) and ``|`` (or) | :class:`~mongoengine.queryset.Q` objects using the ``&`` (and) and ``|`` (or) | ||||||
| operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the | operators. To use a :class:`~mongoengine.queryset.Q` object, pass it in as the | ||||||
| first positional argument to :attr:`Document.objects` when you filter it by | first positional argument to :attr:`Document.objects` when you filter it by | ||||||
| @@ -380,66 +325,11 @@ calling it with keyword arguments:: | |||||||
|     # Get top posts |     # Get top posts | ||||||
|     Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000)) |     Post.objects((Q(featured=True) & Q(hits__gte=1000)) | Q(hits__gte=5000)) | ||||||
|  |  | ||||||
| .. _guide-atomic-updates: | .. warning:: | ||||||
|  |    Only use these advanced queries if absolutely necessary as they will execute | ||||||
| Atomic updates |    significantly slower than regular queries. This is because they are not | ||||||
| ============== |    natively supported by MongoDB -- they are compiled to Javascript and sent | ||||||
| Documents may be updated atomically by using the |    to the server for execution. | ||||||
| :meth:`~mongoengine.queryset.QuerySet.update_one` and |  | ||||||
| :meth:`~mongoengine.queryset.QuerySet.update` methods on a |  | ||||||
| :meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers" |  | ||||||
| that you may use with these methods: |  | ||||||
|  |  | ||||||
| * ``set`` -- set a particular value |  | ||||||
| * ``unset`` -- delete a particular value (since MongoDB v1.3+) |  | ||||||
| * ``inc`` -- increment a value by a given amount |  | ||||||
| * ``dec`` -- decrement a value by a given amount |  | ||||||
| * ``pop`` -- remove the last item from a list |  | ||||||
| * ``push`` -- append a value to a list |  | ||||||
| * ``push_all`` -- append several values to a list |  | ||||||
| * ``pop`` -- remove the first or last element of a list |  | ||||||
| * ``pull`` -- remove a value from a list |  | ||||||
| * ``pull_all`` -- remove several values from a list |  | ||||||
| * ``add_to_set`` -- add value to a list only if its not in the list already |  | ||||||
|  |  | ||||||
| The syntax for atomic updates is similar to the querying syntax, but the |  | ||||||
| modifier comes before the field, not after it:: |  | ||||||
|  |  | ||||||
|     >>> post = BlogPost(title='Test', page_views=0, tags=['database']) |  | ||||||
|     >>> post.save() |  | ||||||
|     >>> BlogPost.objects(id=post.id).update_one(inc__page_views=1) |  | ||||||
|     >>> post.reload()  # the document has been changed, so we need to reload it |  | ||||||
|     >>> post.page_views |  | ||||||
|     1 |  | ||||||
|     >>> BlogPost.objects(id=post.id).update_one(set__title='Example Post') |  | ||||||
|     >>> post.reload() |  | ||||||
|     >>> post.title |  | ||||||
|     'Example Post' |  | ||||||
|     >>> BlogPost.objects(id=post.id).update_one(push__tags='nosql') |  | ||||||
|     >>> post.reload() |  | ||||||
|     >>> post.tags |  | ||||||
|     ['database', 'nosql'] |  | ||||||
|  |  | ||||||
| .. note :: |  | ||||||
|  |  | ||||||
|     In version 0.5 the :meth:`~mongoengine.Document.save` runs atomic updates |  | ||||||
|     on changed documents by tracking changes to that document. |  | ||||||
|  |  | ||||||
| The positional operator allows you to update list items without knowing the |  | ||||||
| index position, therefore making the update a single atomic operation.  As we |  | ||||||
| cannot use the `$` syntax in keyword arguments it has been mapped to `S`:: |  | ||||||
|  |  | ||||||
|     >>> post = BlogPost(title='Test', page_views=0, tags=['database', 'mongo']) |  | ||||||
|     >>> post.save() |  | ||||||
|     >>> BlogPost.objects(id=post.id, tags='mongo').update(set__tags__S='mongodb') |  | ||||||
|     >>> post.reload() |  | ||||||
|     >>> post.tags |  | ||||||
|     ['database', 'mongodb'] |  | ||||||
|  |  | ||||||
| .. note :: |  | ||||||
|     Currently only top level lists are handled, future versions of mongodb / |  | ||||||
|     pymongo plan to support nested positional operators.  See `The $ positional |  | ||||||
|     operator <http://www.mongodb.org/display/DOCS/Updating#Updating-The%24positionaloperator>`_. |  | ||||||
|  |  | ||||||
| Server-side javascript execution | Server-side javascript execution | ||||||
| ================================ | ================================ | ||||||
| @@ -543,3 +433,43 @@ following example shows how the substitutions are made:: | |||||||
|         return comments; |         return comments; | ||||||
|     } |     } | ||||||
|     """) |     """) | ||||||
|  |  | ||||||
|  | .. _guide-atomic-updates: | ||||||
|  |  | ||||||
|  | Atomic updates | ||||||
|  | ============== | ||||||
|  | Documents may be updated atomically by using the | ||||||
|  | :meth:`~mongoengine.queryset.QuerySet.update_one` and | ||||||
|  | :meth:`~mongoengine.queryset.QuerySet.update` methods on a  | ||||||
|  | :meth:`~mongoengine.queryset.QuerySet`. There are several different "modifiers" | ||||||
|  | that you may use with these methods: | ||||||
|  |  | ||||||
|  | * ``set`` -- set a particular value | ||||||
|  | * ``unset`` -- delete a particular value (since MongoDB v1.3+) | ||||||
|  | * ``inc`` -- increment a value by a given amount | ||||||
|  | * ``dec`` -- decrement a value by a given amount | ||||||
|  | * ``pop`` -- remove the last item from a list | ||||||
|  | * ``push`` -- append a value to a list | ||||||
|  | * ``push_all`` -- append several values to a list | ||||||
|  | * ``pop`` -- remove the first or last element of a list | ||||||
|  | * ``pull`` -- remove a value from a list | ||||||
|  | * ``pull_all`` -- remove several values from a list | ||||||
|  | * ``add_to_set`` -- add value to a list only if its not in the list already | ||||||
|  |  | ||||||
|  | The syntax for atomic updates is similar to the querying syntax, but the  | ||||||
|  | modifier comes before the field, not after it:: | ||||||
|  |  | ||||||
|  |     >>> post = BlogPost(title='Test', page_views=0, tags=['database']) | ||||||
|  |     >>> post.save() | ||||||
|  |     >>> BlogPost.objects(id=post.id).update_one(inc__page_views=1) | ||||||
|  |     >>> post.reload()  # the document has been changed, so we need to reload it | ||||||
|  |     >>> post.page_views | ||||||
|  |     1 | ||||||
|  |     >>> BlogPost.objects(id=post.id).update_one(set__title='Example Post') | ||||||
|  |     >>> post.reload() | ||||||
|  |     >>> post.title | ||||||
|  |     'Example Post' | ||||||
|  |     >>> BlogPost.objects(id=post.id).update_one(push__tags='nosql') | ||||||
|  |     >>> post.reload() | ||||||
|  |     >>> post.tags | ||||||
|  |     ['database', 'nosql'] | ||||||
|   | |||||||
| @@ -1,53 +0,0 @@ | |||||||
| .. _signals: |  | ||||||
|  |  | ||||||
| Signals |  | ||||||
| ======= |  | ||||||
|  |  | ||||||
| .. versionadded:: 0.5 |  | ||||||
|  |  | ||||||
| .. note:: |  | ||||||
|  |  | ||||||
|   Signal support is provided by the excellent `blinker`_ library and |  | ||||||
|   will gracefully fall back if it is not available. |  | ||||||
|  |  | ||||||
|  |  | ||||||
| The following document signals exist in MongoEngine and are pretty self-explanatory: |  | ||||||
|  |  | ||||||
|   * `mongoengine.signals.pre_init` |  | ||||||
|   * `mongoengine.signals.post_init` |  | ||||||
|   * `mongoengine.signals.pre_save` |  | ||||||
|   * `mongoengine.signals.post_save` |  | ||||||
|   * `mongoengine.signals.pre_delete` |  | ||||||
|   * `mongoengine.signals.post_delete` |  | ||||||
|   * `mongoengine.signals.pre_bulk_insert` |  | ||||||
|   * `mongoengine.signals.post_bulk_insert` |  | ||||||
|  |  | ||||||
| Example usage:: |  | ||||||
|  |  | ||||||
|     from mongoengine import * |  | ||||||
|     from mongoengine import signals |  | ||||||
|  |  | ||||||
|     class Author(Document): |  | ||||||
|         name = StringField() |  | ||||||
|  |  | ||||||
|         def __unicode__(self): |  | ||||||
|             return self.name |  | ||||||
|  |  | ||||||
|         @classmethod |  | ||||||
|         def pre_save(cls, sender, document, **kwargs): |  | ||||||
|             logging.debug("Pre Save: %s" % document.name) |  | ||||||
|  |  | ||||||
|         @classmethod |  | ||||||
|         def post_save(cls, sender, document, **kwargs): |  | ||||||
|             logging.debug("Post Save: %s" % document.name) |  | ||||||
|             if 'created' in kwargs: |  | ||||||
|                 if kwargs['created']: |  | ||||||
|                     logging.debug("Created") |  | ||||||
|                 else: |  | ||||||
|                     logging.debug("Updated") |  | ||||||
|  |  | ||||||
|     signals.pre_save.connect(Author.pre_save, sender=Author) |  | ||||||
|     signals.post_save.connect(Author.post_save, sender=Author) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| .. _blinker: http://pypi.python.org/pypi/blinker |  | ||||||
| @@ -2,66 +2,34 @@ | |||||||
| MongoEngine User Documentation | MongoEngine User Documentation | ||||||
| ============================== | ============================== | ||||||
|  |  | ||||||
| **MongoEngine** is an Object-Document Mapper, written in Python for working with | MongoEngine is an Object-Document Mapper, written in Python for working with  | ||||||
| MongoDB. To install it, simply run | MongoDB. To install it, simply run | ||||||
|  |  | ||||||
| .. code-block:: console | .. code-block:: console | ||||||
|  |  | ||||||
|     # pip install -U mongoengine |     # pip install -U mongoengine | ||||||
|  |  | ||||||
| :doc:`tutorial` | The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_. | ||||||
|   Start here for a quick overview. |  | ||||||
|  |  | ||||||
| :doc:`guide/index` |  | ||||||
|   The Full guide to MongoEngine |  | ||||||
|  |  | ||||||
| :doc:`apireference` |  | ||||||
|   The complete API documentation. |  | ||||||
|  |  | ||||||
| :doc:`upgrade` |  | ||||||
|   How to upgrade MongoEngine. |  | ||||||
|  |  | ||||||
| :doc:`django` |  | ||||||
|   Using MongoEngine and Django |  | ||||||
|  |  | ||||||
| Community |  | ||||||
| --------- |  | ||||||
|  |  | ||||||
| To get help with using MongoEngine, use the `MongoEngine Users mailing list | To get help with using MongoEngine, use the `MongoEngine Users mailing list | ||||||
| <http://groups.google.com/group/mongoengine-users>`_ or come chat on the | <http://groups.google.com/group/mongoengine-users>`_ or come chat on the | ||||||
| `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_. | `#mongoengine IRC channel <irc://irc.freenode.net/mongoengine>`_. | ||||||
|  |  | ||||||
| Contributing | If you are interested in contributing, join the developers' `mailing list  | ||||||
| ------------ |  | ||||||
|  |  | ||||||
| The source is available on `GitHub <http://github.com/hmarr/mongoengine>`_ and |  | ||||||
| contributions are always encouraged. Contributions can be as simple as |  | ||||||
| minor tweaks to this documentation. To contribute, fork the project on |  | ||||||
| `GitHub <http://github.com/hmarr/mongoengine>`_ and send a |  | ||||||
| pull request. |  | ||||||
|  |  | ||||||
| Also, you can join the developers' `mailing list |  | ||||||
| <http://groups.google.com/group/mongoengine-dev>`_. | <http://groups.google.com/group/mongoengine-dev>`_. | ||||||
|  |  | ||||||
| Changes |  | ||||||
| ------- |  | ||||||
| See the :doc:`changelog` for a full list of changes to MongoEngine and |  | ||||||
| :doc:`upgrade` for upgrade information. |  | ||||||
|  |  | ||||||
| .. toctree:: | .. toctree:: | ||||||
|    :hidden: |    :maxdepth: 2 | ||||||
|  |  | ||||||
|    tutorial |    tutorial | ||||||
|    guide/index |    guide/index | ||||||
|    apireference |    apireference | ||||||
|    django |    django | ||||||
|    changelog |    changelog | ||||||
|    upgrade |  | ||||||
|  |  | ||||||
| Indices and tables | Indices and tables | ||||||
| ------------------ | ================== | ||||||
|  |  | ||||||
| * :ref:`genindex` | * :ref:`genindex` | ||||||
| * :ref:`modindex` |  | ||||||
| * :ref:`search` | * :ref:`search` | ||||||
|  |  | ||||||
|   | |||||||
| @@ -22,7 +22,7 @@ function. The only argument we need to provide is the name of the MongoDB | |||||||
| database to use:: | database to use:: | ||||||
|  |  | ||||||
|     from mongoengine import * |     from mongoengine import * | ||||||
|  |      | ||||||
|     connect('tumblelog') |     connect('tumblelog') | ||||||
|  |  | ||||||
| For more information about connecting to MongoDB see :ref:`guide-connecting`. | For more information about connecting to MongoDB see :ref:`guide-connecting`. | ||||||
| @@ -112,7 +112,7 @@ link table, we can just store a list of tags in each post. So, for both | |||||||
| efficiency and simplicity's sake, we'll store the tags as strings directly | efficiency and simplicity's sake, we'll store the tags as strings directly | ||||||
| within the post, rather than storing references to tags in a separate | within the post, rather than storing references to tags in a separate | ||||||
| collection. Especially as tags are generally very short (often even shorter | collection. Especially as tags are generally very short (often even shorter | ||||||
| than a document's id), this denormalisation won't impact very strongly on the | than a document's id), this denormalisation won't impact very strongly on the  | ||||||
| size of our database. So let's take a look that the code our modified | size of our database. So let's take a look that the code our modified | ||||||
| :class:`Post` class:: | :class:`Post` class:: | ||||||
|  |  | ||||||
| @@ -152,26 +152,6 @@ We can then store a list of comment documents in our post document:: | |||||||
|         tags = ListField(StringField(max_length=30)) |         tags = ListField(StringField(max_length=30)) | ||||||
|         comments = ListField(EmbeddedDocumentField(Comment)) |         comments = ListField(EmbeddedDocumentField(Comment)) | ||||||
|  |  | ||||||
| Handling deletions of references |  | ||||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |  | ||||||
|  |  | ||||||
| The :class:`~mongoengine.ReferenceField` object takes a keyword |  | ||||||
| `reverse_delete_rule` for handling deletion rules if the reference is deleted. |  | ||||||
| To delete all the posts if a user is deleted set the rule:: |  | ||||||
|  |  | ||||||
|     class Post(Document): |  | ||||||
|         title = StringField(max_length=120, required=True) |  | ||||||
|         author = ReferenceField(User, reverse_delete_rule=CASCADE) |  | ||||||
|         tags = ListField(StringField(max_length=30)) |  | ||||||
|         comments = ListField(EmbeddedDocumentField(Comment)) |  | ||||||
|  |  | ||||||
| See :class:`~mongoengine.ReferenceField` for more information. |  | ||||||
|  |  | ||||||
| ..note:: |  | ||||||
|     MapFields and DictFields currently don't support automatic handling of |  | ||||||
|     deleted references |  | ||||||
|  |  | ||||||
|  |  | ||||||
| Adding data to our Tumblelog | Adding data to our Tumblelog | ||||||
| ============================ | ============================ | ||||||
| Now that we've defined how our documents will be structured, let's start adding | Now that we've defined how our documents will be structured, let's start adding | ||||||
| @@ -270,5 +250,5 @@ the first matched by the query you provide. Aggregation functions may also be | |||||||
| used on :class:`~mongoengine.queryset.QuerySet` objects:: | used on :class:`~mongoengine.queryset.QuerySet` objects:: | ||||||
|  |  | ||||||
|     num_posts = Post.objects(tags='mongodb').count() |     num_posts = Post.objects(tags='mongodb').count() | ||||||
|     print 'Found %d posts with tag "mongodb"' % num_posts |     print 'Found % posts with tag "mongodb"' % num_posts | ||||||
|  |      | ||||||
|   | |||||||
							
								
								
									
										115
									
								
								docs/upgrade.rst
									
									
									
									
									
								
							
							
						
						
									
										115
									
								
								docs/upgrade.rst
									
									
									
									
									
								
							| @@ -1,115 +0,0 @@ | |||||||
| ========= |  | ||||||
| Upgrading |  | ||||||
| ========= |  | ||||||
|  |  | ||||||
| 0.5 to 0.6 |  | ||||||
| ========== |  | ||||||
|  |  | ||||||
| Embedded Documents - if you had a `pk` field you will have to rename it from `_id` |  | ||||||
| to `pk` as pk is no longer a property of Embedded Documents. |  | ||||||
|  |  | ||||||
| Reverse Delete Rules in Embedded Documents, MapFields and DictFields now throw |  | ||||||
| an InvalidDocument error as they aren't currently supported. |  | ||||||
|  |  | ||||||
| Document._get_subclasses - Is no longer used and the class method has been removed. |  | ||||||
|  |  | ||||||
| Document.objects.with_id - now raises an InvalidQueryError if used with a filter. |  | ||||||
|  |  | ||||||
| FutureWarning - A future warning has been added to all inherited classes that |  | ||||||
| don't define `allow_inheritance` in their meta. |  | ||||||
|  |  | ||||||
| You may need to update pyMongo to 2.0 for use with Sharding. |  | ||||||
|  |  | ||||||
| 0.4 to 0.5 |  | ||||||
| =========== |  | ||||||
|  |  | ||||||
| There have been the following backwards incompatibilities from 0.4 to 0.5.  The |  | ||||||
| main areas of changed are: choices in fields, map_reduce and collection names. |  | ||||||
|  |  | ||||||
| Choice options: |  | ||||||
| --------------- |  | ||||||
|  |  | ||||||
| Are now expected to be an iterable of tuples, with  the first element in each |  | ||||||
| tuple being the actual value to be stored. The second element is the |  | ||||||
| human-readable name for the option. |  | ||||||
|  |  | ||||||
|  |  | ||||||
| PyMongo / MongoDB |  | ||||||
| ----------------- |  | ||||||
|  |  | ||||||
| map reduce now requires pymongo 1.11+- The pymongo merge_output and reduce_output |  | ||||||
| parameters, have been depreciated. |  | ||||||
|  |  | ||||||
| More methods now use map_reduce as db.eval is not supported for sharding as such |  | ||||||
| the following have been changed: |  | ||||||
|  |  | ||||||
|     * :meth:`~mongoengine.queryset.QuerySet.sum` |  | ||||||
|     * :meth:`~mongoengine.queryset.QuerySet.average` |  | ||||||
|     * :meth:`~mongoengine.queryset.QuerySet.item_frequencies` |  | ||||||
|  |  | ||||||
|  |  | ||||||
| Default collection naming |  | ||||||
| ------------------------- |  | ||||||
|  |  | ||||||
| Previously it was just lowercase, its now much more pythonic and readable as its |  | ||||||
| lowercase and underscores, previously :: |  | ||||||
|  |  | ||||||
|     class MyAceDocument(Document): |  | ||||||
|         pass |  | ||||||
|  |  | ||||||
|     MyAceDocument._meta['collection'] == myacedocument |  | ||||||
|  |  | ||||||
| In 0.5 this will change to :: |  | ||||||
|  |  | ||||||
|     class MyAceDocument(Document): |  | ||||||
|         pass |  | ||||||
|  |  | ||||||
|     MyAceDocument._get_collection_name() == my_ace_document |  | ||||||
|  |  | ||||||
| To upgrade use a Mixin class to set meta like so :: |  | ||||||
|  |  | ||||||
|     class BaseMixin(object): |  | ||||||
|         meta = { |  | ||||||
|             'collection': lambda c: c.__name__.lower() |  | ||||||
|         } |  | ||||||
|  |  | ||||||
|     class MyAceDocument(Document, BaseMixin): |  | ||||||
|         pass |  | ||||||
|  |  | ||||||
|     MyAceDocument._get_collection_name() == "myacedocument" |  | ||||||
|  |  | ||||||
| Alternatively, you can rename your collections eg :: |  | ||||||
|  |  | ||||||
|     from mongoengine.connection import _get_db |  | ||||||
|     from mongoengine.base import _document_registry |  | ||||||
|  |  | ||||||
|     def rename_collections(): |  | ||||||
|         db = _get_db() |  | ||||||
|  |  | ||||||
|         failure = False |  | ||||||
|  |  | ||||||
|         collection_names = [d._get_collection_name() for d in _document_registry.values()] |  | ||||||
|  |  | ||||||
|         for new_style_name in collection_names: |  | ||||||
|             if not new_style_name:  # embedded documents don't have collections |  | ||||||
|                 continue |  | ||||||
|             old_style_name = new_style_name.replace('_', '') |  | ||||||
|  |  | ||||||
|             if old_style_name == new_style_name: |  | ||||||
|                 continue  # Nothing to do |  | ||||||
|  |  | ||||||
|             existing = db.collection_names() |  | ||||||
|             if old_style_name in existing: |  | ||||||
|                 if new_style_name in existing: |  | ||||||
|                     failure = True |  | ||||||
|                     print "FAILED to rename: %s to %s (already exists)" % ( |  | ||||||
|                         old_style_name, new_style_name) |  | ||||||
|                 else: |  | ||||||
|                     db[old_style_name].rename(new_style_name) |  | ||||||
|                     print "Renamed:  %s to %s" % (old_style_name, new_style_name) |  | ||||||
|  |  | ||||||
|         if failure: |  | ||||||
|             print "Upgrading  collection names failed" |  | ||||||
|         else: |  | ||||||
|             print "Upgraded collection names" |  | ||||||
|  |  | ||||||
| @@ -6,14 +6,13 @@ import connection | |||||||
| from connection import * | from connection import * | ||||||
| import queryset | import queryset | ||||||
| from queryset import * | from queryset import * | ||||||
| import signals |  | ||||||
| from signals import * |  | ||||||
|  |  | ||||||
| __all__ = (document.__all__ + fields.__all__ + connection.__all__ + | __all__ = (document.__all__ + fields.__all__ + connection.__all__ + | ||||||
|            queryset.__all__ + signals.__all__) |            queryset.__all__) | ||||||
|  |  | ||||||
| VERSION = (0, 6, 16) | __author__ = 'Harry Marr' | ||||||
|  |  | ||||||
|  | VERSION = (0, 4, 1) | ||||||
|  |  | ||||||
| def get_version(): | def get_version(): | ||||||
|     version = '%s.%s' % (VERSION[0], VERSION[1]) |     version = '%s.%s' % (VERSION[0], VERSION[1]) | ||||||
| @@ -22,3 +21,4 @@ def get_version(): | |||||||
|     return version |     return version | ||||||
|  |  | ||||||
| __version__ = get_version() | __version__ = get_version() | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										1148
									
								
								mongoengine/base.py
									
									
									
									
									
								
							
							
						
						
									
										1148
									
								
								mongoengine/base.py
									
									
									
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,166 +1,71 @@ | |||||||
| import pymongo | from pymongo import Connection | ||||||
| from pymongo import Connection, ReplicaSetConnection, uri_parser | import multiprocessing | ||||||
|  |  | ||||||
|  | __all__ = ['ConnectionError', 'connect'] | ||||||
|  |  | ||||||
|  |  | ||||||
| __all__ = ['ConnectionError', 'connect', 'register_connection', | _connection_defaults = { | ||||||
|            'DEFAULT_CONNECTION_NAME'] |     'host': 'localhost', | ||||||
|  |     'port': 27017, | ||||||
|  | } | ||||||
|  | _connection = {} | ||||||
|  | _connection_settings = _connection_defaults.copy() | ||||||
|  |  | ||||||
|  | _db_name = None | ||||||
| DEFAULT_CONNECTION_NAME = 'default' | _db_username = None | ||||||
|  | _db_password = None | ||||||
|  | _db = {} | ||||||
|  |  | ||||||
|  |  | ||||||
| class ConnectionError(Exception): | class ConnectionError(Exception): | ||||||
|     pass |     pass | ||||||
|  |  | ||||||
|  |  | ||||||
| _connection_settings = {} | def _get_connection(reconnect=False): | ||||||
| _connections = {} |     global _connection | ||||||
| _dbs = {} |     identity = get_identity() | ||||||
|  |  | ||||||
|  |  | ||||||
| def register_connection(alias, name, host='localhost', port=27017, |  | ||||||
|                         is_slave=False, read_preference=False, slaves=None, |  | ||||||
|                         username=None, password=None, **kwargs): |  | ||||||
|     """Add a connection. |  | ||||||
|  |  | ||||||
|     :param alias: the name that will be used to refer to this connection |  | ||||||
|         throughout MongoEngine |  | ||||||
|     :param name: the name of the specific database to use |  | ||||||
|     :param host: the host name of the :program:`mongod` instance to connect to |  | ||||||
|     :param port: the port that the :program:`mongod` instance is running on |  | ||||||
|     :param is_slave: whether the connection can act as a slave ** Depreciated pymongo 2.0.1+ |  | ||||||
|     :param read_preference: The read preference for the collection ** Added pymongo 2.1 |  | ||||||
|     :param slaves: a list of aliases of slave connections; each of these must |  | ||||||
|         be a registered connection that has :attr:`is_slave` set to ``True`` |  | ||||||
|     :param username: username to authenticate with |  | ||||||
|     :param password: password to authenticate with |  | ||||||
|     :param kwargs: allow ad-hoc parameters to be passed into the pymongo driver |  | ||||||
|  |  | ||||||
|     """ |  | ||||||
|     global _connection_settings |  | ||||||
|  |  | ||||||
|     conn_settings = { |  | ||||||
|         'name': name, |  | ||||||
|         'host': host, |  | ||||||
|         'port': port, |  | ||||||
|         'is_slave': is_slave, |  | ||||||
|         'slaves': slaves or [], |  | ||||||
|         'username': username, |  | ||||||
|         'password': password, |  | ||||||
|         'read_preference': read_preference |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     # Handle uri style connections |  | ||||||
|     if "://" in host: |  | ||||||
|         uri_dict = uri_parser.parse_uri(host) |  | ||||||
|         if uri_dict.get('database') is None: |  | ||||||
|             raise ConnectionError("If using URI style connection include "\ |  | ||||||
|                                   "database name in string") |  | ||||||
|         conn_settings.update({ |  | ||||||
|             'host': host, |  | ||||||
|             'name': uri_dict.get('database'), |  | ||||||
|             'username': uri_dict.get('username'), |  | ||||||
|             'password': uri_dict.get('password'), |  | ||||||
|             'read_preference': read_preference, |  | ||||||
|         }) |  | ||||||
|         if "replicaSet" in host: |  | ||||||
|             conn_settings['replicaSet'] = True |  | ||||||
|  |  | ||||||
|     conn_settings.update(kwargs) |  | ||||||
|     _connection_settings[alias] = conn_settings |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def disconnect(alias=DEFAULT_CONNECTION_NAME): |  | ||||||
|     global _connections |  | ||||||
|     global _dbs |  | ||||||
|  |  | ||||||
|     if alias in _connections: |  | ||||||
|         get_connection(alias=alias).disconnect() |  | ||||||
|         del _connections[alias] |  | ||||||
|     if alias in _dbs: |  | ||||||
|         del _dbs[alias] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False): |  | ||||||
|     global _connections |  | ||||||
|     # Connect to the database if not already connected |     # Connect to the database if not already connected | ||||||
|     if reconnect: |     if _connection.get(identity) is None or reconnect: | ||||||
|         disconnect(alias) |  | ||||||
|  |  | ||||||
|     if alias not in _connections: |  | ||||||
|         if alias not in _connection_settings: |  | ||||||
|             msg = 'Connection with alias "%s" has not been defined' % alias |  | ||||||
|             if alias == DEFAULT_CONNECTION_NAME: |  | ||||||
|                 msg = 'You have not defined a default connection' |  | ||||||
|             raise ConnectionError(msg) |  | ||||||
|         conn_settings = _connection_settings[alias].copy() |  | ||||||
|  |  | ||||||
|         if hasattr(pymongo, 'version_tuple'):  # Support for 2.1+ |  | ||||||
|             conn_settings.pop('name', None) |  | ||||||
|             conn_settings.pop('slaves', None) |  | ||||||
|             conn_settings.pop('is_slave', None) |  | ||||||
|             conn_settings.pop('username', None) |  | ||||||
|             conn_settings.pop('password', None) |  | ||||||
|         else: |  | ||||||
|             # Get all the slave connections |  | ||||||
|             if 'slaves' in conn_settings: |  | ||||||
|                 slaves = [] |  | ||||||
|                 for slave_alias in conn_settings['slaves']: |  | ||||||
|                     slaves.append(get_connection(slave_alias)) |  | ||||||
|                 conn_settings['slaves'] = slaves |  | ||||||
|                 conn_settings.pop('read_preference', None) |  | ||||||
|  |  | ||||||
|         connection_class = Connection |  | ||||||
|         if 'replicaSet' in conn_settings: |  | ||||||
|             conn_settings['hosts_or_uri'] = conn_settings.pop('host', None) |  | ||||||
|             # Discard port since it can't be used on ReplicaSetConnection |  | ||||||
|             conn_settings.pop('port', None) |  | ||||||
|             # Discard replicaSet if not base string |  | ||||||
|             if not isinstance(conn_settings['replicaSet'], basestring): |  | ||||||
|                 conn_settings.pop('replicaSet', None) |  | ||||||
|             connection_class = ReplicaSetConnection |  | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             _connections[alias] = connection_class(**conn_settings) |             _connection[identity] = Connection(**_connection_settings) | ||||||
|         except Exception, e: |         except: | ||||||
|             raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e)) |             raise ConnectionError('Cannot connect to the database') | ||||||
|     return _connections[alias] |     return _connection[identity] | ||||||
|  |  | ||||||
|  | def _get_db(reconnect=False): | ||||||
|  |     global _db, _connection | ||||||
|  |     identity = get_identity() | ||||||
|  |     # Connect if not already connected | ||||||
|  |     if _connection.get(identity) is None or reconnect: | ||||||
|  |         _connection[identity] = _get_connection(reconnect=reconnect) | ||||||
|  |  | ||||||
| def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False): |     if _db.get(identity) is None or reconnect: | ||||||
|     global _dbs |         # _db_name will be None if the user hasn't called connect() | ||||||
|     if reconnect: |         if _db_name is None: | ||||||
|         disconnect(alias) |             raise ConnectionError('Not connected to the database') | ||||||
|  |  | ||||||
|     if alias not in _dbs: |         # Get DB from current connection and authenticate if necessary | ||||||
|         conn = get_connection(alias) |         _db[identity] = _connection[identity][_db_name] | ||||||
|         conn_settings = _connection_settings[alias] |         if _db_username and _db_password: | ||||||
|         _dbs[alias] = conn[conn_settings['name']] |             _db[identity].authenticate(_db_username, _db_password) | ||||||
|         # Authenticate if necessary |  | ||||||
|         if conn_settings['username'] and conn_settings['password']: |  | ||||||
|             _dbs[alias].authenticate(conn_settings['username'], |  | ||||||
|                                      conn_settings['password']) |  | ||||||
|     return _dbs[alias] |  | ||||||
|  |  | ||||||
|  |     return _db[identity] | ||||||
|  |  | ||||||
| def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs): | def get_identity(): | ||||||
|     """Connect to the database specified by the 'db' argument. |     identity = multiprocessing.current_process()._identity | ||||||
|  |     identity = 0 if not identity else identity[0] | ||||||
|     Connection settings may be provided here as well if the database is not |     return identity | ||||||
|     running on the default port on localhost. If authentication is needed, |      | ||||||
|     provide username and password arguments as well. | def connect(db, username=None, password=None, **kwargs): | ||||||
|  |     """Connect to the database specified by the 'db' argument. Connection  | ||||||
|     Multiple databases are supported by using aliases.  Provide a separate |     settings may be provided here as well if the database is not running on | ||||||
|     `alias` to connect to a different instance of :program:`mongod`. |     the default port on localhost. If authentication is needed, provide | ||||||
|  |     username and password arguments as well. | ||||||
|     .. versionchanged:: 0.6 - added multiple database support. |  | ||||||
|     """ |     """ | ||||||
|     global _connections |     global _connection_settings, _db_name, _db_username, _db_password, _db | ||||||
|     if alias not in _connections: |     _connection_settings = dict(_connection_defaults, **kwargs) | ||||||
|         register_connection(alias, db, **kwargs) |     _db_name = db | ||||||
|  |     _db_username = username | ||||||
|  |     _db_password = password | ||||||
|  |     return _get_db(reconnect=True) | ||||||
|  |  | ||||||
|     return get_connection(alias) |  | ||||||
|  |  | ||||||
| # Support old naming convention |  | ||||||
| _get_connection = get_connection |  | ||||||
| _get_db = get_db |  | ||||||
|   | |||||||
| @@ -1,192 +0,0 @@ | |||||||
| from bson import DBRef, SON |  | ||||||
|  |  | ||||||
| from base import (BaseDict, BaseList, TopLevelDocumentMetaclass, get_document) |  | ||||||
| from fields import (ReferenceField, ListField, DictField, MapField) |  | ||||||
| from connection import get_db |  | ||||||
| from queryset import QuerySet |  | ||||||
| from document import Document |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DeReference(object): |  | ||||||
|  |  | ||||||
|     def __call__(self, items, max_depth=1, instance=None, name=None): |  | ||||||
|         """ |  | ||||||
|         Cheaply dereferences the items to a set depth. |  | ||||||
|         Also handles the convertion of complex data types. |  | ||||||
|  |  | ||||||
|         :param items: The iterable (dict, list, queryset) to be dereferenced. |  | ||||||
|         :param max_depth: The maximum depth to recurse to |  | ||||||
|         :param instance: The owning instance used for tracking changes by |  | ||||||
|             :class:`~mongoengine.base.ComplexBaseField` |  | ||||||
|         :param name: The name of the field, used for tracking changes by |  | ||||||
|             :class:`~mongoengine.base.ComplexBaseField` |  | ||||||
|         :param get: A boolean determining if being called by __get__ |  | ||||||
|         """ |  | ||||||
|         if items is None or isinstance(items, basestring): |  | ||||||
|             return items |  | ||||||
|  |  | ||||||
|         # cheapest way to convert a queryset to a list |  | ||||||
|         # list(queryset) uses a count() query to determine length |  | ||||||
|         if isinstance(items, QuerySet): |  | ||||||
|             items = [i for i in items] |  | ||||||
|  |  | ||||||
|         self.max_depth = max_depth |  | ||||||
|  |  | ||||||
|         doc_type = None |  | ||||||
|         if instance and instance._fields: |  | ||||||
|             doc_type = instance._fields[name].field |  | ||||||
|  |  | ||||||
|             if isinstance(doc_type, ReferenceField): |  | ||||||
|                 doc_type = doc_type.document_type |  | ||||||
|                 if all([i.__class__ == doc_type for i in items]): |  | ||||||
|                     return items |  | ||||||
|  |  | ||||||
|         self.reference_map = self._find_references(items) |  | ||||||
|         self.object_map = self._fetch_objects(doc_type=doc_type) |  | ||||||
|         return self._attach_objects(items, 0, instance, name) |  | ||||||
|  |  | ||||||
|     def _find_references(self, items, depth=0): |  | ||||||
|         """ |  | ||||||
|         Recursively finds all db references to be dereferenced |  | ||||||
|  |  | ||||||
|         :param items: The iterable (dict, list, queryset) |  | ||||||
|         :param depth: The current depth of recursion |  | ||||||
|         """ |  | ||||||
|         reference_map = {} |  | ||||||
|         if not items or depth >= self.max_depth: |  | ||||||
|             return reference_map |  | ||||||
|  |  | ||||||
|         # Determine the iterator to use |  | ||||||
|         if not hasattr(items, 'items'): |  | ||||||
|             iterator = enumerate(items) |  | ||||||
|         else: |  | ||||||
|             iterator = items.iteritems() |  | ||||||
|  |  | ||||||
|         # Recursively find dbreferences |  | ||||||
|         depth += 1 |  | ||||||
|         for k, item in iterator: |  | ||||||
|             if hasattr(item, '_fields'): |  | ||||||
|                 for field_name, field in item._fields.iteritems(): |  | ||||||
|                     v = item._data.get(field_name, None) |  | ||||||
|                     if isinstance(v, (DBRef)): |  | ||||||
|                         reference_map.setdefault(field.document_type, []).append(v.id) |  | ||||||
|                     elif isinstance(v, (dict, SON)) and '_ref' in v: |  | ||||||
|                         reference_map.setdefault(get_document(v['_cls']), []).append(v['_ref'].id) |  | ||||||
|                     elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: |  | ||||||
|                         field_cls = getattr(getattr(field, 'field', None), 'document_type', None) |  | ||||||
|                         references = self._find_references(v, depth) |  | ||||||
|                         for key, refs in references.iteritems(): |  | ||||||
|                             if isinstance(field_cls, (Document, TopLevelDocumentMetaclass)): |  | ||||||
|                                 key = field_cls |  | ||||||
|                             reference_map.setdefault(key, []).extend(refs) |  | ||||||
|             elif isinstance(item, (DBRef)): |  | ||||||
|                 reference_map.setdefault(item.collection, []).append(item.id) |  | ||||||
|             elif isinstance(item, (dict, SON)) and '_ref' in item: |  | ||||||
|                 reference_map.setdefault(get_document(item['_cls']), []).append(item['_ref'].id) |  | ||||||
|             elif isinstance(item, (dict, list, tuple)) and depth - 1 <= self.max_depth: |  | ||||||
|                 references = self._find_references(item, depth - 1) |  | ||||||
|                 for key, refs in references.iteritems(): |  | ||||||
|                     reference_map.setdefault(key, []).extend(refs) |  | ||||||
|  |  | ||||||
|         return reference_map |  | ||||||
|  |  | ||||||
|     def _fetch_objects(self, doc_type=None): |  | ||||||
|         """Fetch all references and convert to their document objects |  | ||||||
|         """ |  | ||||||
|         object_map = {} |  | ||||||
|         for col, dbrefs in self.reference_map.iteritems(): |  | ||||||
|             keys = object_map.keys() |  | ||||||
|             refs = list(set([dbref for dbref in dbrefs if str(dbref) not in keys])) |  | ||||||
|             if hasattr(col, 'objects'):  # We have a document class for the refs |  | ||||||
|                 references = col.objects.in_bulk(refs) |  | ||||||
|                 for key, doc in references.iteritems(): |  | ||||||
|                     object_map[key] = doc |  | ||||||
|             else:  # Generic reference: use the refs data to convert to document |  | ||||||
|                 if doc_type and not isinstance(doc_type, (ListField, DictField, MapField,) ): |  | ||||||
|                     references = doc_type._get_db()[col].find({'_id': {'$in': refs}}) |  | ||||||
|                     for ref in references: |  | ||||||
|                         doc = doc_type._from_son(ref) |  | ||||||
|                         object_map[doc.id] = doc |  | ||||||
|                 else: |  | ||||||
|                     references = get_db()[col].find({'_id': {'$in': refs}}) |  | ||||||
|                     for ref in references: |  | ||||||
|                         if '_cls' in ref: |  | ||||||
|                             doc = get_document(ref["_cls"])._from_son(ref) |  | ||||||
|                         elif doc_type is None: |  | ||||||
|                             doc = get_document( |  | ||||||
|                                 ''.join(x.capitalize() |  | ||||||
|                                         for x in col.split('_')))._from_son(ref) |  | ||||||
|                         else: |  | ||||||
|                             doc = doc_type._from_son(ref) |  | ||||||
|                         object_map[doc.id] = doc |  | ||||||
|         return object_map |  | ||||||
|  |  | ||||||
|     def _attach_objects(self, items, depth=0, instance=None, name=None): |  | ||||||
|         """ |  | ||||||
|         Recursively finds all db references to be dereferenced |  | ||||||
|  |  | ||||||
|         :param items: The iterable (dict, list, queryset) |  | ||||||
|         :param depth: The current depth of recursion |  | ||||||
|         :param instance: The owning instance used for tracking changes by |  | ||||||
|             :class:`~mongoengine.base.ComplexBaseField` |  | ||||||
|         :param name: The name of the field, used for tracking changes by |  | ||||||
|             :class:`~mongoengine.base.ComplexBaseField` |  | ||||||
|         """ |  | ||||||
|         if not items: |  | ||||||
|             if isinstance(items, (BaseDict, BaseList)): |  | ||||||
|                 return items |  | ||||||
|  |  | ||||||
|             if instance: |  | ||||||
|                 if isinstance(items, dict): |  | ||||||
|                     return BaseDict(items, instance, name) |  | ||||||
|                 else: |  | ||||||
|                     return BaseList(items, instance, name) |  | ||||||
|  |  | ||||||
|         if isinstance(items, (dict, SON)): |  | ||||||
|             if '_ref' in items: |  | ||||||
|                 return self.object_map.get(items['_ref'].id, items) |  | ||||||
|             elif '_types' in items and '_cls' in items: |  | ||||||
|                 doc = get_document(items['_cls'])._from_son(items) |  | ||||||
|                 doc._data = self._attach_objects(doc._data, depth, doc, name) |  | ||||||
|                 return doc |  | ||||||
|  |  | ||||||
|         if not hasattr(items, 'items'): |  | ||||||
|             is_list = True |  | ||||||
|             iterator = enumerate(items) |  | ||||||
|             data = [] |  | ||||||
|         else: |  | ||||||
|             is_list = False |  | ||||||
|             iterator = items.iteritems() |  | ||||||
|             data = {} |  | ||||||
|  |  | ||||||
|         depth += 1 |  | ||||||
|         for k, v in iterator: |  | ||||||
|             if is_list: |  | ||||||
|                 data.append(v) |  | ||||||
|             else: |  | ||||||
|                 data[k] = v |  | ||||||
|  |  | ||||||
|             if k in self.object_map and not is_list: |  | ||||||
|                 data[k] = self.object_map[k] |  | ||||||
|             elif hasattr(v, '_fields'): |  | ||||||
|                 for field_name, field in v._fields.iteritems(): |  | ||||||
|                     v = data[k]._data.get(field_name, None) |  | ||||||
|                     if isinstance(v, (DBRef)): |  | ||||||
|                         data[k]._data[field_name] = self.object_map.get(v.id, v) |  | ||||||
|                     elif isinstance(v, (dict, SON)) and '_ref' in v: |  | ||||||
|                         data[k]._data[field_name] = self.object_map.get(v['_ref'].id, v) |  | ||||||
|                     elif isinstance(v, dict) and depth <= self.max_depth: |  | ||||||
|                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) |  | ||||||
|                     elif isinstance(v, (list, tuple)) and depth <= self.max_depth: |  | ||||||
|                         data[k]._data[field_name] = self._attach_objects(v, depth, instance=instance, name=name) |  | ||||||
|             elif isinstance(v, (dict, list, tuple)) and depth <= self.max_depth: |  | ||||||
|                 data[k] = self._attach_objects(v, depth - 1, instance=instance, name=name) |  | ||||||
|             elif hasattr(v, 'id'): |  | ||||||
|                 data[k] = self.object_map.get(v.id, v) |  | ||||||
|  |  | ||||||
|         if instance and name: |  | ||||||
|             if is_list: |  | ||||||
|                 return BaseList(data, instance, name) |  | ||||||
|             return BaseDict(data, instance, name) |  | ||||||
|         depth += 1 |  | ||||||
|         return data |  | ||||||
| @@ -1,76 +1,36 @@ | |||||||
| import datetime |  | ||||||
|  |  | ||||||
| from mongoengine import * | from mongoengine import * | ||||||
|  |  | ||||||
|  | from django.utils.hashcompat import md5_constructor, sha_constructor | ||||||
| from django.utils.encoding import smart_str | from django.utils.encoding import smart_str | ||||||
| from django.contrib.auth.models import AnonymousUser | from django.contrib.auth.models import AnonymousUser | ||||||
| from django.utils.translation import ugettext_lazy as _ |  | ||||||
|  |  | ||||||
| try: |  | ||||||
|     from django.contrib.auth.hashers import check_password, make_password |  | ||||||
| except ImportError: |  | ||||||
|     """Handle older versions of Django""" |  | ||||||
|     from django.utils.hashcompat import md5_constructor, sha_constructor |  | ||||||
|  |  | ||||||
|     def get_hexdigest(algorithm, salt, raw_password): |  | ||||||
|         raw_password, salt = smart_str(raw_password), smart_str(salt) |  | ||||||
|         if algorithm == 'md5': |  | ||||||
|             return md5_constructor(salt + raw_password).hexdigest() |  | ||||||
|         elif algorithm == 'sha1': |  | ||||||
|             return sha_constructor(salt + raw_password).hexdigest() |  | ||||||
|         raise ValueError('Got unknown password algorithm type in password') |  | ||||||
|  |  | ||||||
|     def check_password(raw_password, password): |  | ||||||
|         algo, salt, hash = password.split('$') |  | ||||||
|         return hash == get_hexdigest(algo, salt, raw_password) |  | ||||||
|  |  | ||||||
|     def make_password(raw_password): |  | ||||||
|         from random import random |  | ||||||
|         algo = 'sha1' |  | ||||||
|         salt = get_hexdigest(algo, str(random()), str(random()))[:5] |  | ||||||
|         hash = get_hexdigest(algo, salt, raw_password) |  | ||||||
|         return '%s$%s$%s' % (algo, salt, hash) |  | ||||||
|  |  | ||||||
|  | import datetime | ||||||
|  |  | ||||||
| REDIRECT_FIELD_NAME = 'next' | REDIRECT_FIELD_NAME = 'next' | ||||||
|  |  | ||||||
|  | def get_hexdigest(algorithm, salt, raw_password): | ||||||
|  |     raw_password, salt = smart_str(raw_password), smart_str(salt) | ||||||
|  |     if algorithm == 'md5': | ||||||
|  |         return md5_constructor(salt + raw_password).hexdigest() | ||||||
|  |     elif algorithm == 'sha1': | ||||||
|  |         return sha_constructor(salt + raw_password).hexdigest() | ||||||
|  |     raise ValueError('Got unknown password algorithm type in password') | ||||||
|  |  | ||||||
|  |  | ||||||
| class User(Document): | class User(Document): | ||||||
|     """A User document that aims to mirror most of the API specified by Django |     """A User document that aims to mirror most of the API specified by Django | ||||||
|     at http://docs.djangoproject.com/en/dev/topics/auth/#users |     at http://docs.djangoproject.com/en/dev/topics/auth/#users | ||||||
|     """ |     """ | ||||||
|     username = StringField(max_length=30, required=True, |     username = StringField(max_length=30, required=True) | ||||||
|                            verbose_name=_('username'), |     first_name = StringField(max_length=30) | ||||||
|                            help_text=_("Required. 30 characters or fewer. Letters, numbers and @/./+/-/_ characters")) |     last_name = StringField(max_length=30) | ||||||
|  |     email = StringField() | ||||||
|     first_name = StringField(max_length=30, |     password = StringField(max_length=128) | ||||||
|                              verbose_name=_('first name')) |     is_staff = BooleanField(default=False) | ||||||
|  |     is_active = BooleanField(default=True) | ||||||
|     last_name = StringField(max_length=30, |     is_superuser = BooleanField(default=False) | ||||||
|                             verbose_name=_('last name')) |     last_login = DateTimeField(default=datetime.datetime.now) | ||||||
|     email = EmailField(verbose_name=_('e-mail address')) |     date_joined = DateTimeField(default=datetime.datetime.now) | ||||||
|     password = StringField(max_length=128, |  | ||||||
|                            verbose_name=_('password'), |  | ||||||
|                            help_text=_("Use '[algo]$[iterations]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>.")) |  | ||||||
|     is_staff = BooleanField(default=False, |  | ||||||
|                             verbose_name=_('staff status'), |  | ||||||
|                             help_text=_("Designates whether the user can log into this admin site.")) |  | ||||||
|     is_active = BooleanField(default=True, |  | ||||||
|                              verbose_name=_('active'), |  | ||||||
|                              help_text=_("Designates whether this user should be treated as active. Unselect this instead of deleting accounts.")) |  | ||||||
|     is_superuser = BooleanField(default=False, |  | ||||||
|                                 verbose_name=_('superuser status'), |  | ||||||
|                                 help_text=_("Designates that this user has all permissions without explicitly assigning them.")) |  | ||||||
|     last_login = DateTimeField(default=datetime.datetime.now, |  | ||||||
|                                verbose_name=_('last login')) |  | ||||||
|     date_joined = DateTimeField(default=datetime.datetime.now, |  | ||||||
|                                 verbose_name=_('date joined')) |  | ||||||
|  |  | ||||||
|     meta = { |  | ||||||
|         'allow_inheritance': True, |  | ||||||
|         'indexes': [ |  | ||||||
|             {'fields': ['username'], 'unique': True} |  | ||||||
|         ] |  | ||||||
|     } |  | ||||||
|  |  | ||||||
|     def __unicode__(self): |     def __unicode__(self): | ||||||
|         return self.username |         return self.username | ||||||
| @@ -92,7 +52,11 @@ class User(Document): | |||||||
|         assigning to :attr:`~mongoengine.django.auth.User.password` as the |         assigning to :attr:`~mongoengine.django.auth.User.password` as the | ||||||
|         password is hashed before storage. |         password is hashed before storage. | ||||||
|         """ |         """ | ||||||
|         self.password = make_password(raw_password) |         from random import random | ||||||
|  |         algo = 'sha1' | ||||||
|  |         salt = get_hexdigest(algo, str(random()), str(random()))[:5] | ||||||
|  |         hash = get_hexdigest(algo, salt, raw_password) | ||||||
|  |         self.password = '%s$%s$%s' % (algo, salt, hash) | ||||||
|         self.save() |         self.save() | ||||||
|         return self |         return self | ||||||
|  |  | ||||||
| @@ -102,7 +66,8 @@ class User(Document): | |||||||
|         :attr:`~mongoengine.django.auth.User.password` as the password is |         :attr:`~mongoengine.django.auth.User.password` as the password is | ||||||
|         hashed before storage. |         hashed before storage. | ||||||
|         """ |         """ | ||||||
|         return check_password(raw_password, self.password) |         algo, salt, hash = self.password.split('$') | ||||||
|  |         return hash == get_hexdigest(algo, salt, raw_password) | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def create_user(cls, username, password, email=None): |     def create_user(cls, username, password, email=None): | ||||||
| @@ -121,7 +86,7 @@ class User(Document): | |||||||
|             else: |             else: | ||||||
|                 email = '@'.join([email_name, domain_part.lower()]) |                 email = '@'.join([email_name, domain_part.lower()]) | ||||||
|  |  | ||||||
|         user = cls(username=username, email=email, date_joined=now) |         user = User(username=username, email=email, date_joined=now) | ||||||
|         user.set_password(password) |         user.set_password(password) | ||||||
|         user.save() |         user.save() | ||||||
|         return user |         return user | ||||||
| @@ -134,10 +99,6 @@ class MongoEngineBackend(object): | |||||||
|     """Authenticate using MongoEngine and mongoengine.django.auth.User. |     """Authenticate using MongoEngine and mongoengine.django.auth.User. | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     supports_object_permissions = False |  | ||||||
|     supports_anonymous_user = False |  | ||||||
|     supports_inactive_user = False |  | ||||||
|  |  | ||||||
|     def authenticate(self, username=None, password=None): |     def authenticate(self, username=None, password=None): | ||||||
|         user = User.objects(username=username).first() |         user = User.objects(username=username).first() | ||||||
|         if user: |         if user: | ||||||
|   | |||||||
| @@ -1,6 +1,3 @@ | |||||||
| from datetime import datetime |  | ||||||
|  |  | ||||||
| from django.conf import settings |  | ||||||
| from django.contrib.sessions.backends.base import SessionBase, CreateError | from django.contrib.sessions.backends.base import SessionBase, CreateError | ||||||
| from django.core.exceptions import SuspiciousOperation | from django.core.exceptions import SuspiciousOperation | ||||||
| from django.utils.encoding import force_unicode | from django.utils.encoding import force_unicode | ||||||
| @@ -8,22 +5,16 @@ from django.utils.encoding import force_unicode | |||||||
| from mongoengine.document import Document | from mongoengine.document import Document | ||||||
| from mongoengine import fields | from mongoengine import fields | ||||||
| from mongoengine.queryset import OperationError | from mongoengine.queryset import OperationError | ||||||
| from mongoengine.connection import DEFAULT_CONNECTION_NAME |  | ||||||
|  |  | ||||||
|  | from datetime import datetime | ||||||
| MONGOENGINE_SESSION_DB_ALIAS = getattr( |  | ||||||
|     settings, 'MONGOENGINE_SESSION_DB_ALIAS', |  | ||||||
|     DEFAULT_CONNECTION_NAME) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoSession(Document): | class MongoSession(Document): | ||||||
|     session_key = fields.StringField(primary_key=True, max_length=40) |     session_key = fields.StringField(primary_key=True, max_length=40) | ||||||
|     session_data = fields.StringField() |     session_data = fields.StringField() | ||||||
|     expire_date = fields.DateTimeField() |     expire_date = fields.DateTimeField() | ||||||
|  |      | ||||||
|     meta = {'collection': 'django_session', |     meta = {'collection': 'django_session', 'allow_inheritance': False} | ||||||
|             'db_alias': MONGOENGINE_SESSION_DB_ALIAS, |  | ||||||
|             'allow_inheritance': False} |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class SessionStore(SessionBase): | class SessionStore(SessionBase): | ||||||
| @@ -44,7 +35,7 @@ class SessionStore(SessionBase): | |||||||
|  |  | ||||||
|     def create(self): |     def create(self): | ||||||
|         while True: |         while True: | ||||||
|             self._session_key = self._get_new_session_key() |             self.session_key = self._get_new_session_key() | ||||||
|             try: |             try: | ||||||
|                 self.save(must_create=True) |                 self.save(must_create=True) | ||||||
|             except CreateError: |             except CreateError: | ||||||
| @@ -54,8 +45,6 @@ class SessionStore(SessionBase): | |||||||
|             return |             return | ||||||
|  |  | ||||||
|     def save(self, must_create=False): |     def save(self, must_create=False): | ||||||
|         if self.session_key is None: |  | ||||||
|             self._session_key = self._get_new_session_key() |  | ||||||
|         s = MongoSession(session_key=self.session_key) |         s = MongoSession(session_key=self.session_key) | ||||||
|         s.session_data = self.encode(self._get_session(no_load=must_create)) |         s.session_data = self.encode(self._get_session(no_load=must_create)) | ||||||
|         s.expire_date = self.get_expiry_date() |         s.expire_date = self.get_expiry_date() | ||||||
|   | |||||||
| @@ -1,7 +1,6 @@ | |||||||
| from django.http import Http404 | from django.http import Http404 | ||||||
| from mongoengine.queryset import QuerySet | from mongoengine.queryset import QuerySet | ||||||
| from mongoengine.base import BaseDocument | from mongoengine.base import BaseDocument | ||||||
| from mongoengine.base import ValidationError |  | ||||||
|  |  | ||||||
| def _get_queryset(cls): | def _get_queryset(cls): | ||||||
|     """Inspired by django.shortcuts.*""" |     """Inspired by django.shortcuts.*""" | ||||||
| @@ -26,7 +25,7 @@ def get_document_or_404(cls, *args, **kwargs): | |||||||
|     queryset = _get_queryset(cls) |     queryset = _get_queryset(cls) | ||||||
|     try: |     try: | ||||||
|         return queryset.get(*args, **kwargs) |         return queryset.get(*args, **kwargs) | ||||||
|     except (queryset._document.DoesNotExist, ValidationError): |     except queryset._document.DoesNotExist: | ||||||
|         raise Http404('No %s matches the given query.' % queryset._document._class_name) |         raise Http404('No %s matches the given query.' % queryset._document._class_name) | ||||||
|  |  | ||||||
| def get_list_or_404(cls, *args, **kwargs): | def get_list_or_404(cls, *args, **kwargs): | ||||||
|   | |||||||
| @@ -10,7 +10,7 @@ class MongoTestCase(TestCase): | |||||||
|     """ |     """ | ||||||
|     db_name = 'test_%s' % settings.MONGO_DATABASE_NAME |     db_name = 'test_%s' % settings.MONGO_DATABASE_NAME | ||||||
|     def __init__(self, methodName='runtest'): |     def __init__(self, methodName='runtest'): | ||||||
|         self.db = connect(self.db_name).get_db() |         self.db = connect(self.db_name) | ||||||
|         super(MongoTestCase, self).__init__(methodName) |         super(MongoTestCase, self).__init__(methodName) | ||||||
|  |  | ||||||
|     def _post_teardown(self): |     def _post_teardown(self): | ||||||
|   | |||||||
| @@ -1,19 +1,12 @@ | |||||||
|  | from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, | ||||||
|  |                   ValidationError) | ||||||
|  | from queryset import OperationError | ||||||
|  | from connection import _get_db | ||||||
|  |  | ||||||
| import pymongo | import pymongo | ||||||
|  |  | ||||||
| from bson.dbref import DBRef |  | ||||||
|  |  | ||||||
| from mongoengine import signals | __all__ = ['Document', 'EmbeddedDocument', 'ValidationError', 'OperationError'] | ||||||
| from base import (DocumentMetaclass, TopLevelDocumentMetaclass, BaseDocument, |  | ||||||
|                   BaseDict, BaseList) |  | ||||||
| from queryset import OperationError |  | ||||||
| from connection import get_db, DEFAULT_CONNECTION_NAME |  | ||||||
|  |  | ||||||
| __all__ = ['Document', 'EmbeddedDocument', 'DynamicDocument', |  | ||||||
|            'DynamicEmbeddedDocument', 'OperationError', 'InvalidCollectionError'] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class InvalidCollectionError(Exception): |  | ||||||
|     pass |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class EmbeddedDocument(BaseDocument): | class EmbeddedDocument(BaseDocument): | ||||||
| @@ -25,26 +18,6 @@ class EmbeddedDocument(BaseDocument): | |||||||
|  |  | ||||||
|     __metaclass__ = DocumentMetaclass |     __metaclass__ = DocumentMetaclass | ||||||
|  |  | ||||||
|     def __init__(self, *args, **kwargs): |  | ||||||
|         super(EmbeddedDocument, self).__init__(*args, **kwargs) |  | ||||||
|         self._changed_fields = [] |  | ||||||
|  |  | ||||||
|     def __delattr__(self, *args, **kwargs): |  | ||||||
|         """Handle deletions of fields""" |  | ||||||
|         field_name = args[0] |  | ||||||
|         if field_name in self._fields: |  | ||||||
|             default = self._fields[field_name].default |  | ||||||
|             if callable(default): |  | ||||||
|                 default = default() |  | ||||||
|             setattr(self, field_name, default) |  | ||||||
|         else: |  | ||||||
|             super(EmbeddedDocument, self).__delattr__(*args, **kwargs) |  | ||||||
|  |  | ||||||
|     def __eq__(self, other): |  | ||||||
|         if isinstance(other, self.__class__): |  | ||||||
|             return self._data == other._data |  | ||||||
|         return False |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Document(BaseDocument): | class Document(BaseDocument): | ||||||
|     """The base class used for defining the structure and properties of |     """The base class used for defining the structure and properties of | ||||||
| @@ -79,71 +52,11 @@ class Document(BaseDocument): | |||||||
|     dictionary. The value should be a list of field names or tuples of field |     dictionary. The value should be a list of field names or tuples of field | ||||||
|     names. Index direction may be specified by prefixing the field names with |     names. Index direction may be specified by prefixing the field names with | ||||||
|     a **+** or **-** sign. |     a **+** or **-** sign. | ||||||
|  |  | ||||||
|     Automatic index creation can be disabled by specifying |  | ||||||
|     attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to |  | ||||||
|     False then indexes will not be created by MongoEngine.  This is useful in |  | ||||||
|     production systems where index creation is performed as part of a deployment |  | ||||||
|     system. |  | ||||||
|  |  | ||||||
|     By default, _types will be added to the start of every index (that |  | ||||||
|     doesn't contain a list) if allow_inheritance is True. This can be |  | ||||||
|     disabled by either setting types to False on the specific index or |  | ||||||
|     by setting index_types to False on the meta dictionary for the document. |  | ||||||
|     """ |     """ | ||||||
|  |  | ||||||
|     __metaclass__ = TopLevelDocumentMetaclass |     __metaclass__ = TopLevelDocumentMetaclass | ||||||
|  |  | ||||||
|     @apply |     def save(self, safe=True, force_insert=False, validate=True): | ||||||
|     def pk(): |  | ||||||
|         """Primary key alias |  | ||||||
|         """ |  | ||||||
|         def fget(self): |  | ||||||
|             return getattr(self, self._meta['id_field']) |  | ||||||
|         def fset(self, value): |  | ||||||
|             return setattr(self, self._meta['id_field'], value) |  | ||||||
|         return property(fget, fset) |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def _get_db(cls): |  | ||||||
|         """Some Model using other db_alias""" |  | ||||||
|         return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME )) |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def _get_collection(cls): |  | ||||||
|         """Returns the collection for the document.""" |  | ||||||
|         if not hasattr(cls, '_collection') or cls._collection is None: |  | ||||||
|             db = cls._get_db() |  | ||||||
|             collection_name = cls._get_collection_name() |  | ||||||
|             # Create collection as a capped collection if specified |  | ||||||
|             if cls._meta['max_size'] or cls._meta['max_documents']: |  | ||||||
|                 # Get max document limit and max byte size from meta |  | ||||||
|                 max_size = cls._meta['max_size'] or 10000000  # 10MB default |  | ||||||
|                 max_documents = cls._meta['max_documents'] |  | ||||||
|  |  | ||||||
|                 if collection_name in db.collection_names(): |  | ||||||
|                     cls._collection = db[collection_name] |  | ||||||
|                     # The collection already exists, check if its capped |  | ||||||
|                     # options match the specified capped options |  | ||||||
|                     options = cls._collection.options() |  | ||||||
|                     if options.get('max') != max_documents or \ |  | ||||||
|                        options.get('size') != max_size: |  | ||||||
|                         msg = ('Cannot create collection "%s" as a capped ' |  | ||||||
|                                'collection as it already exists') % cls._collection |  | ||||||
|                         raise InvalidCollectionError(msg) |  | ||||||
|                 else: |  | ||||||
|                     # Create the collection as a capped collection |  | ||||||
|                     opts = {'capped': True, 'size': max_size} |  | ||||||
|                     if max_documents: |  | ||||||
|                         opts['max'] = max_documents |  | ||||||
|                     cls._collection = db.create_collection( |  | ||||||
|                         collection_name, **opts |  | ||||||
|                     ) |  | ||||||
|             else: |  | ||||||
|                 cls._collection = db[collection_name] |  | ||||||
|         return cls._collection |  | ||||||
|  |  | ||||||
|     def save(self, safe=True, force_insert=False, validate=True, write_options=None, |  | ||||||
|             cascade=None, cascade_kwargs=None, _refs=None): |  | ||||||
|         """Save the :class:`~mongoengine.Document` to the database. If the |         """Save the :class:`~mongoengine.Document` to the database. If the | ||||||
|         document already exists, it will be updated, otherwise it will be |         document already exists, it will be updated, otherwise it will be | ||||||
|         created. |         created. | ||||||
| @@ -154,81 +67,17 @@ class Document(BaseDocument): | |||||||
|         :param safe: check if the operation succeeded before returning |         :param safe: check if the operation succeeded before returning | ||||||
|         :param force_insert: only try to create a new document, don't allow |         :param force_insert: only try to create a new document, don't allow | ||||||
|             updates of existing documents |             updates of existing documents | ||||||
|         :param validate: validates the document; set to ``False`` to skip. |         :param validate: validates the document; set to ``False`` for skiping | ||||||
|         :param write_options: Extra keyword arguments are passed down to |  | ||||||
|                 :meth:`~pymongo.collection.Collection.save` OR |  | ||||||
|                 :meth:`~pymongo.collection.Collection.insert` |  | ||||||
|                 which will be used as options for the resultant ``getLastError`` command. |  | ||||||
|                 For example, ``save(..., write_options={w: 2, fsync: True}, ...)`` will |  | ||||||
|                 wait until at least two servers have recorded the write and will force an |  | ||||||
|                 fsync on each server being written to. |  | ||||||
|         :param cascade: Sets the flag for cascading saves.  You can set a default by setting |  | ||||||
|             "cascade" in the document __meta__ |  | ||||||
|         :param cascade_kwargs: optional kwargs dictionary to be passed throw to cascading saves |  | ||||||
|         :param _refs: A list of processed references used in cascading saves |  | ||||||
|  |  | ||||||
|         .. versionchanged:: 0.5 |  | ||||||
|             In existing documents it only saves changed fields using set / unset |  | ||||||
|             Saves are cascaded and any :class:`~bson.dbref.DBRef` objects |  | ||||||
|             that have changes are saved as well. |  | ||||||
|         .. versionchanged:: 0.6 |  | ||||||
|             Cascade saves are optional = defaults to True, if you want fine grain |  | ||||||
|             control then you can turn off using document meta['cascade'] = False |  | ||||||
|             Also you can pass different kwargs to the cascade save using cascade_kwargs |  | ||||||
|             which overwrites the existing kwargs with custom values |  | ||||||
|  |  | ||||||
|         """ |         """ | ||||||
|         signals.pre_save.send(self.__class__, document=self) |  | ||||||
|  |  | ||||||
|         if validate: |         if validate: | ||||||
|             self.validate() |             self.validate() | ||||||
|  |  | ||||||
|         if not write_options: |  | ||||||
|             write_options = {} |  | ||||||
|  |  | ||||||
|         doc = self.to_mongo() |         doc = self.to_mongo() | ||||||
|  |  | ||||||
|         created = force_insert or '_id' not in doc |  | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
|             collection = self.__class__.objects._collection |             collection = self.__class__.objects._collection | ||||||
|             if created: |             if force_insert: | ||||||
|                 if force_insert: |                 object_id = collection.insert(doc, safe=safe) | ||||||
|                     object_id = collection.insert(doc, safe=safe, **write_options) |  | ||||||
|                 else: |  | ||||||
|                     object_id = collection.save(doc, safe=safe, **write_options) |  | ||||||
|             else: |             else: | ||||||
|                 object_id = doc['_id'] |                 object_id = collection.save(doc, safe=safe) | ||||||
|                 updates, removals = self._delta() |  | ||||||
|  |  | ||||||
|                 # Need to add shard key to query, or you get an error |  | ||||||
|                 select_dict = {'_id': object_id} |  | ||||||
|                 shard_key = self.__class__._meta.get('shard_key', tuple()) |  | ||||||
|                 for k in shard_key: |  | ||||||
|                     actual_key = self._db_field_map.get(k, k) |  | ||||||
|                     select_dict[actual_key] = doc[actual_key] |  | ||||||
|  |  | ||||||
|                 upsert = self._created |  | ||||||
|                 if updates: |  | ||||||
|                     collection.update(select_dict, {"$set": updates}, upsert=upsert, safe=safe, **write_options) |  | ||||||
|                 if removals: |  | ||||||
|                     collection.update(select_dict, {"$unset": removals}, upsert=upsert, safe=safe, **write_options) |  | ||||||
|  |  | ||||||
|             cascade = self._meta.get('cascade', True) if cascade is None else cascade |  | ||||||
|             if cascade: |  | ||||||
|                 kwargs = { |  | ||||||
|                     "safe": safe, |  | ||||||
|                     "force_insert": force_insert, |  | ||||||
|                     "validate": validate, |  | ||||||
|                     "write_options": write_options, |  | ||||||
|                     "cascade": cascade |  | ||||||
|                 } |  | ||||||
|                 if cascade_kwargs:  # Allow granular control over cascades |  | ||||||
|                     kwargs.update(cascade_kwargs) |  | ||||||
|                 kwargs['_refs'] = _refs |  | ||||||
|                 #self._changed_fields = [] |  | ||||||
|                 self.cascade_save(**kwargs) |  | ||||||
|  |  | ||||||
|         except pymongo.errors.OperationFailure, err: |         except pymongo.errors.OperationFailure, err: | ||||||
|             message = 'Could not save document (%s)' |             message = 'Could not save document (%s)' | ||||||
|             if u'duplicate key' in unicode(err): |             if u'duplicate key' in unicode(err): | ||||||
| @@ -237,175 +86,37 @@ class Document(BaseDocument): | |||||||
|         id_field = self._meta['id_field'] |         id_field = self._meta['id_field'] | ||||||
|         self[id_field] = self._fields[id_field].to_python(object_id) |         self[id_field] = self._fields[id_field].to_python(object_id) | ||||||
|  |  | ||||||
|         self._changed_fields = [] |  | ||||||
|         self._created = False |  | ||||||
|         signals.post_save.send(self.__class__, document=self, created=created) |  | ||||||
|         return self |  | ||||||
|  |  | ||||||
|     def cascade_save(self, *args, **kwargs): |  | ||||||
|         """Recursively saves any references / generic references on an object""" |  | ||||||
|         from fields import ReferenceField, GenericReferenceField |  | ||||||
|         _refs = kwargs.get('_refs', []) or [] |  | ||||||
|  |  | ||||||
|         for name, cls in self._fields.items(): |  | ||||||
|  |  | ||||||
|             if not isinstance(cls, (ReferenceField, GenericReferenceField)): |  | ||||||
|                 continue |  | ||||||
|  |  | ||||||
|             ref = getattr(self, name) |  | ||||||
|             if not ref: |  | ||||||
|                 continue |  | ||||||
|             if isinstance(ref, DBRef): |  | ||||||
|                 continue |  | ||||||
|  |  | ||||||
|             ref_id = "%s,%s" % (ref.__class__.__name__, str(ref._data)) |  | ||||||
|             if ref and ref_id not in _refs: |  | ||||||
|                 _refs.append(ref_id) |  | ||||||
|                 kwargs["_refs"] = _refs |  | ||||||
|                 ref.save(**kwargs) |  | ||||||
|                 ref._changed_fields = [] |  | ||||||
|  |  | ||||||
|     def update(self, **kwargs): |  | ||||||
|         """Performs an update on the :class:`~mongoengine.Document` |  | ||||||
|         A convenience wrapper to :meth:`~mongoengine.QuerySet.update`. |  | ||||||
|  |  | ||||||
|         Raises :class:`OperationError` if called on an object that has not yet |  | ||||||
|         been saved. |  | ||||||
|         """ |  | ||||||
|         if not self.pk: |  | ||||||
|             raise OperationError('attempt to update a document not yet saved') |  | ||||||
|  |  | ||||||
|         # Need to add shard key to query, or you get an error |  | ||||||
|         select_dict = {'pk': self.pk} |  | ||||||
|         shard_key = self.__class__._meta.get('shard_key', tuple()) |  | ||||||
|         for k in shard_key: |  | ||||||
|             select_dict[k] = getattr(self, k) |  | ||||||
|         return self.__class__.objects(**select_dict).update_one(**kwargs) |  | ||||||
|  |  | ||||||
|     def delete(self, safe=False): |     def delete(self, safe=False): | ||||||
|         """Delete the :class:`~mongoengine.Document` from the database. This |         """Delete the :class:`~mongoengine.Document` from the database. This | ||||||
|         will only take effect if the document has been previously saved. |         will only take effect if the document has been previously saved. | ||||||
|  |  | ||||||
|         :param safe: check if the operation succeeded before returning |         :param safe: check if the operation succeeded before returning | ||||||
|         """ |         """ | ||||||
|         signals.pre_delete.send(self.__class__, document=self) |         id_field = self._meta['id_field'] | ||||||
|  |         object_id = self._fields[id_field].to_mongo(self[id_field]) | ||||||
|         try: |         try: | ||||||
|             self.__class__.objects(pk=self.pk).delete(safe=safe) |             self.__class__.objects(**{id_field: object_id}).delete(safe=safe) | ||||||
|         except pymongo.errors.OperationFailure, err: |         except pymongo.errors.OperationFailure, err: | ||||||
|             message = u'Could not delete document (%s)' % err.message |             message = u'Could not delete document (%s)' % err.message | ||||||
|             raise OperationError(message) |             raise OperationError(message) | ||||||
|  |  | ||||||
|         signals.post_delete.send(self.__class__, document=self) |     def reload(self): | ||||||
|  |  | ||||||
|     def select_related(self, max_depth=1): |  | ||||||
|         """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to |  | ||||||
|         a maximum depth in order to cut down the number queries to mongodb. |  | ||||||
|  |  | ||||||
|         .. versionadded:: 0.5 |  | ||||||
|         """ |  | ||||||
|         from dereference import DeReference |  | ||||||
|         self._data = DeReference()(self._data, max_depth) |  | ||||||
|         return self |  | ||||||
|  |  | ||||||
|     def reload(self, max_depth=1): |  | ||||||
|         """Reloads all attributes from the database. |         """Reloads all attributes from the database. | ||||||
|  |  | ||||||
|         .. versionadded:: 0.1.2 |         .. versionadded:: 0.1.2 | ||||||
|         .. versionchanged:: 0.6  Now chainable |  | ||||||
|         """ |         """ | ||||||
|         id_field = self._meta['id_field'] |         id_field = self._meta['id_field'] | ||||||
|         obj = self.__class__.objects( |         obj = self.__class__.objects(**{id_field: self[id_field]}).first() | ||||||
|                 **{id_field: self[id_field]} |  | ||||||
|               ).first().select_related(max_depth=max_depth) |  | ||||||
|         for field in self._fields: |         for field in self._fields: | ||||||
|             setattr(self, field, self._reload(field, obj[field])) |             setattr(self, field, obj[field]) | ||||||
|         if self._dynamic: |  | ||||||
|             for name in self._dynamic_fields.keys(): |  | ||||||
|                 setattr(self, name, self._reload(name, obj._data[name])) |  | ||||||
|         self._changed_fields = obj._changed_fields |  | ||||||
|         return obj |  | ||||||
|  |  | ||||||
|     def _reload(self, key, value): |  | ||||||
|         """Used by :meth:`~mongoengine.Document.reload` to ensure the |  | ||||||
|         correct instance is linked to self. |  | ||||||
|         """ |  | ||||||
|         if isinstance(value, BaseDict): |  | ||||||
|             value = [(k, self._reload(k, v)) for k, v in value.items()] |  | ||||||
|             value = BaseDict(value, self, key) |  | ||||||
|         elif isinstance(value, BaseList): |  | ||||||
|             value = [self._reload(key, v) for v in value] |  | ||||||
|             value = BaseList(value, self, key) |  | ||||||
|         elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)): |  | ||||||
|             value._changed_fields = [] |  | ||||||
|         return value |  | ||||||
|  |  | ||||||
|     def to_dbref(self): |  | ||||||
|         """Returns an instance of :class:`~bson.dbref.DBRef` useful in |  | ||||||
|         `__raw__` queries.""" |  | ||||||
|         if not self.pk: |  | ||||||
|             msg = "Only saved documents can have a valid dbref" |  | ||||||
|             raise OperationError(msg) |  | ||||||
|         return DBRef(self.__class__._get_collection_name(), self.pk) |  | ||||||
|  |  | ||||||
|     @classmethod |  | ||||||
|     def register_delete_rule(cls, document_cls, field_name, rule): |  | ||||||
|         """This method registers the delete rules to apply when removing this |  | ||||||
|         object. |  | ||||||
|         """ |  | ||||||
|         cls._meta['delete_rules'][(document_cls, field_name)] = rule |  | ||||||
|  |  | ||||||
|     @classmethod |     @classmethod | ||||||
|     def drop_collection(cls): |     def drop_collection(cls): | ||||||
|         """Drops the entire collection associated with this |         """Drops the entire collection associated with this | ||||||
|         :class:`~mongoengine.Document` type from the database. |         :class:`~mongoengine.Document` type from the database. | ||||||
|         """ |         """ | ||||||
|         from mongoengine.queryset import QuerySet |         db = _get_db() | ||||||
|         db = cls._get_db() |         db.drop_collection(cls._meta['collection']) | ||||||
|         db.drop_collection(cls._get_collection_name()) |  | ||||||
|         QuerySet._reset_already_indexed(cls) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DynamicDocument(Document): |  | ||||||
|     """A Dynamic Document class allowing flexible, expandable and uncontrolled |  | ||||||
|     schemas.  As a :class:`~mongoengine.Document` subclass, acts in the same |  | ||||||
|     way as an ordinary document but has expando style properties.  Any data |  | ||||||
|     passed or set against the :class:`~mongoengine.DynamicDocument` that is |  | ||||||
|     not a field is automatically converted into a |  | ||||||
|     :class:`~mongoengine.DynamicField` and data can be attributed to that |  | ||||||
|     field. |  | ||||||
|  |  | ||||||
|     ..note:: |  | ||||||
|  |  | ||||||
|         There is one caveat on Dynamic Documents: fields cannot start with `_` |  | ||||||
|     """ |  | ||||||
|     __metaclass__ = TopLevelDocumentMetaclass |  | ||||||
|     _dynamic = True |  | ||||||
|  |  | ||||||
|     def __delattr__(self, *args, **kwargs): |  | ||||||
|         """Deletes the attribute by setting to None and allowing _delta to unset |  | ||||||
|         it""" |  | ||||||
|         field_name = args[0] |  | ||||||
|         if field_name in self._dynamic_fields: |  | ||||||
|             setattr(self, field_name, None) |  | ||||||
|         else: |  | ||||||
|             super(DynamicDocument, self).__delattr__(*args, **kwargs) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DynamicEmbeddedDocument(EmbeddedDocument): |  | ||||||
|     """A Dynamic Embedded Document class allowing flexible, expandable and |  | ||||||
|     uncontrolled schemas. See :class:`~mongoengine.DynamicDocument` for more |  | ||||||
|     information about dynamic documents. |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
|     __metaclass__ = DocumentMetaclass |  | ||||||
|     _dynamic = True |  | ||||||
|  |  | ||||||
|     def __delattr__(self, *args, **kwargs): |  | ||||||
|         """Deletes the attribute by setting to None and allowing _delta to unset |  | ||||||
|         it""" |  | ||||||
|         field_name = args[0] |  | ||||||
|         setattr(self, field_name, None) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MapReduceDocument(object): | class MapReduceDocument(object): | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,46 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
|  |  | ||||||
| __all__ = ['pre_init', 'post_init', 'pre_save', 'post_save', |  | ||||||
|            'pre_delete', 'post_delete'] |  | ||||||
|  |  | ||||||
| signals_available = False |  | ||||||
| try: |  | ||||||
|     from blinker import Namespace |  | ||||||
|     signals_available = True |  | ||||||
| except ImportError: |  | ||||||
|     class Namespace(object): |  | ||||||
|         def signal(self, name, doc=None): |  | ||||||
|             return _FakeSignal(name, doc) |  | ||||||
|  |  | ||||||
|     class _FakeSignal(object): |  | ||||||
|         """If blinker is unavailable, create a fake class with the same |  | ||||||
|         interface that allows sending of signals but will fail with an |  | ||||||
|         error on anything else.  Instead of doing anything on send, it |  | ||||||
|         will just ignore the arguments and do nothing instead. |  | ||||||
|         """ |  | ||||||
|  |  | ||||||
|         def __init__(self, name, doc=None): |  | ||||||
|             self.name = name |  | ||||||
|             self.__doc__ = doc |  | ||||||
|  |  | ||||||
|         def _fail(self, *args, **kwargs): |  | ||||||
|             raise RuntimeError('signalling support is unavailable ' |  | ||||||
|                                'because the blinker library is ' |  | ||||||
|                                'not installed.') |  | ||||||
|         send = lambda *a, **kw: None |  | ||||||
|         connect = disconnect = has_receivers_for = receivers_for = \ |  | ||||||
|             temporarily_connected_to = _fail |  | ||||||
|         del _fail |  | ||||||
|  |  | ||||||
| # the namespace for code signals.  If you are not mongoengine code, do |  | ||||||
| # not put signals in here.  Create your own namespace instead. |  | ||||||
| _signals = Namespace() |  | ||||||
|  |  | ||||||
| pre_init = _signals.signal('pre_init') |  | ||||||
| post_init = _signals.signal('post_init') |  | ||||||
| pre_save = _signals.signal('pre_save') |  | ||||||
| post_save = _signals.signal('post_save') |  | ||||||
| pre_delete = _signals.signal('pre_delete') |  | ||||||
| post_delete = _signals.signal('post_delete') |  | ||||||
| pre_bulk_insert = _signals.signal('pre_bulk_insert') |  | ||||||
| post_bulk_insert = _signals.signal('post_bulk_insert') |  | ||||||
| @@ -1,59 +0,0 @@ | |||||||
| from mongoengine.connection import get_db |  | ||||||
|  |  | ||||||
|  |  | ||||||
class query_counter(object):
    """Context manager that counts MongoDB queries via the system profiler.

    Usage: ``with query_counter() as q: ...`` — the counter supports all
    comparison operators, so tests can assert e.g. ``q == 2`` on the number
    of queries issued inside the ``with`` block.
    """

    def __init__(self):
        """Construct the query_counter against the default connection."""
        # Offset for profile entries generated by the counter's own count
        # queries (see _get_count).
        self.counter = 0
        self.db = get_db()

    def __enter__(self):
        """On every with block we need to drop the profile collection."""
        # Profiling must be disabled before system.profile can be dropped;
        # level 2 then records every subsequent operation.
        self.db.set_profiling_level(0)
        self.db.system.profile.drop()
        self.db.set_profiling_level(2)
        return self

    def __exit__(self, t, value, traceback):
        """Reset the profiling level when leaving the with block."""
        self.db.set_profiling_level(0)

    def __eq__(self, value):
        """== Compare querycounter to an expected number of queries."""
        return value == self._get_count()

    def __ne__(self, value):
        """!= Compare querycounter."""
        return not self.__eq__(value)

    def __lt__(self, value):
        """< Compare querycounter."""
        return self._get_count() < value

    def __le__(self, value):
        """<= Compare querycounter."""
        return self._get_count() <= value

    def __gt__(self, value):
        """> Compare querycounter."""
        return self._get_count() > value

    def __ge__(self, value):
        """>= Compare querycounter."""
        return self._get_count() >= value

    def __int__(self):
        """int representation — the current query count."""
        return self._get_count()

    def __repr__(self):
        """repr query_counter as the number of queries."""
        return u"%s" % self._get_count()

    def _get_count(self):
        """Get the number of queries recorded so far.

        The count query itself is profiled too, so ``self.counter`` is
        incremented after every read to offset the entries this method
        adds to system.profile — presumably one per call (NOTE(review):
        confirm against the server's profiling behaviour).
        """
        count = self.db.system.profile.find().count() - self.counter
        self.counter += 1
        return count
| @@ -1,54 +0,0 @@ | |||||||
| # sitelib for noarch packages, sitearch for others (remove the unneeded one) |  | ||||||
| %{!?python_sitelib: %global python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")} |  | ||||||
| %{!?python_sitearch: %global python_sitearch %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")} |  | ||||||
|  |  | ||||||
| %define srcname mongoengine |  | ||||||
|  |  | ||||||
| Name:           python-%{srcname} |  | ||||||
| Version:        0.6.16 |  | ||||||
| Release:        1%{?dist} |  | ||||||
| Summary:        A Python Document-Object Mapper for working with MongoDB |  | ||||||
|  |  | ||||||
| Group:          Development/Libraries |  | ||||||
| License:        MIT |  | ||||||
| URL:            https://github.com/MongoEngine/mongoengine |  | ||||||
| Source0:        %{srcname}-%{version}.tar.bz2 |  | ||||||
|  |  | ||||||
| BuildRequires:  python-devel |  | ||||||
| BuildRequires:  python-setuptools |  | ||||||
|  |  | ||||||
| Requires:       mongodb |  | ||||||
| Requires:       pymongo |  | ||||||
| Requires:       python-blinker |  | ||||||
| Requires:       python-imaging |  | ||||||
|  |  | ||||||
|  |  | ||||||
| %description |  | ||||||
| MongoEngine is an ORM-like layer on top of PyMongo. |  | ||||||
|  |  | ||||||
| %prep |  | ||||||
| %setup -q -n %{srcname}-%{version} |  | ||||||
|  |  | ||||||
|  |  | ||||||
| %build |  | ||||||
| # Remove CFLAGS=... for noarch packages (unneeded) |  | ||||||
| CFLAGS="$RPM_OPT_FLAGS" %{__python} setup.py build |  | ||||||
|  |  | ||||||
|  |  | ||||||
| %install |  | ||||||
| rm -rf $RPM_BUILD_ROOT |  | ||||||
| %{__python} setup.py install -O1 --skip-build --root $RPM_BUILD_ROOT |  | ||||||
|  |  | ||||||
| %clean |  | ||||||
| rm -rf $RPM_BUILD_ROOT |  | ||||||
|  |  | ||||||
| %files |  | ||||||
| %defattr(-,root,root,-) |  | ||||||
| %doc docs AUTHORS LICENSE README.rst |  | ||||||
| # For noarch packages: sitelib |  | ||||||
|  %{python_sitelib}/* |  | ||||||
| # For arch-specific packages: sitearch |  | ||||||
| # %{python_sitearch}/* |  | ||||||
|  |  | ||||||
| %changelog |  | ||||||
| * See: http://readthedocs.org/docs/mongoengine-odm/en/latest/changelog.html |  | ||||||
| @@ -1 +0,0 @@ | |||||||
| pymongo |  | ||||||
							
								
								
									
										13
									
								
								setup.cfg
									
									
									
									
									
								
							
							
						
						
									
										13
									
								
								setup.cfg
									
									
									
									
									
								
							| @@ -1,13 +0,0 @@ | |||||||
| [aliases] |  | ||||||
| test = nosetests |  | ||||||
|  |  | ||||||
| [nosetests] |  | ||||||
| verbosity = 2 |  | ||||||
| detailed-errors = 1 |  | ||||||
| #with-coverage = 1 |  | ||||||
| #cover-erase = 1 |  | ||||||
| #cover-html = 1 |  | ||||||
| #cover-html-dir = ../htmlcov |  | ||||||
| #cover-package = mongoengine |  | ||||||
| where = tests |  | ||||||
| #tests = test_bugfix.py |  | ||||||
							
								
								
									
										10
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										10
									
								
								setup.py
									
									
									
									
									
								
							| @@ -15,7 +15,7 @@ def get_version(version_tuple): | |||||||
|         version = '%s.%s' % (version, version_tuple[2]) |         version = '%s.%s' % (version, version_tuple[2]) | ||||||
|     return version |     return version | ||||||
|  |  | ||||||
| # Dirty hack to get version number from monogengine/__init__.py - we can't | # Dirty hack to get version number from monogengine/__init__.py - we can't  | ||||||
| # import it as it depends on PyMongo and PyMongo isn't installed until this | # import it as it depends on PyMongo and PyMongo isn't installed until this | ||||||
| # file is read | # file is read | ||||||
| init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') | init = os.path.join(os.path.dirname(__file__), 'mongoengine', '__init__.py') | ||||||
| @@ -35,12 +35,10 @@ CLASSIFIERS = [ | |||||||
|  |  | ||||||
| setup(name='mongoengine', | setup(name='mongoengine', | ||||||
|       version=VERSION, |       version=VERSION, | ||||||
|       packages=find_packages(exclude=('tests',)), |       packages=find_packages(), | ||||||
|       author='Harry Marr', |       author='Harry Marr', | ||||||
|       author_email='harry.marr@{nospam}gmail.com', |       author_email='harry.marr@{nospam}gmail.com', | ||||||
|       maintainer="Ross Lawley", |       url='http://hmarr.com/mongoengine/', | ||||||
|       maintainer_email="ross.lawley@{nospam}gmail.com", |  | ||||||
|       url='http://mongoengine.org/', |  | ||||||
|       license='MIT', |       license='MIT', | ||||||
|       include_package_data=True, |       include_package_data=True, | ||||||
|       description=DESCRIPTION, |       description=DESCRIPTION, | ||||||
| @@ -48,5 +46,5 @@ setup(name='mongoengine', | |||||||
|       platforms=['any'], |       platforms=['any'], | ||||||
|       classifiers=CLASSIFIERS, |       classifiers=CLASSIFIERS, | ||||||
|       install_requires=['pymongo'], |       install_requires=['pymongo'], | ||||||
|       tests_require=['nose', 'coverage', 'blinker', 'django>=1.3', 'PIL'] |       test_suite='tests', | ||||||
| ) | ) | ||||||
|   | |||||||
							
								
								
									
										633
									
								
								tests/document.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										633
									
								
								tests/document.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,633 @@ | |||||||
|  | import unittest | ||||||
|  | from datetime import datetime | ||||||
|  | import bson | ||||||
|  | import pymongo | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.connection import _get_db | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class DocumentTest(unittest.TestCase): | ||||||
|  |  | ||||||
    def setUp(self):
        """Connect to the test database and define the shared Person model."""
        # connect() registers a default connection used by all documents.
        connect(db='mongoenginetest')
        self.db = _get_db()

        class Person(Document):
            name = StringField()
            age = IntField()
        # Expose the model so individual tests can use/subclass it.
        self.Person = Person
|  |  | ||||||
|  |     def test_drop_collection(self): | ||||||
|  |         """Ensure that the collection may be dropped from the database. | ||||||
|  |         """ | ||||||
|  |         self.Person(name='Test').save() | ||||||
|  |  | ||||||
|  |         collection = self.Person._meta['collection'] | ||||||
|  |         self.assertTrue(collection in self.db.collection_names()) | ||||||
|  |  | ||||||
|  |         self.Person.drop_collection() | ||||||
|  |         self.assertFalse(collection in self.db.collection_names()) | ||||||
|  |  | ||||||
|  |     def test_definition(self): | ||||||
|  |         """Ensure that document may be defined using fields. | ||||||
|  |         """ | ||||||
|  |         name_field = StringField() | ||||||
|  |         age_field = IntField() | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = name_field | ||||||
|  |             age = age_field | ||||||
|  |             non_field = True | ||||||
|  |  | ||||||
|  |         self.assertEqual(Person._fields['name'], name_field) | ||||||
|  |         self.assertEqual(Person._fields['age'], age_field) | ||||||
|  |         self.assertFalse('non_field' in Person._fields) | ||||||
|  |         self.assertTrue('id' in Person._fields) | ||||||
|  |         # Test iteration over fields | ||||||
|  |         fields = list(Person()) | ||||||
|  |         self.assertTrue('name' in fields and 'age' in fields) | ||||||
|  |         # Ensure Document isn't treated like an actual document | ||||||
|  |         self.assertFalse(hasattr(Document, '_fields')) | ||||||
|  |  | ||||||
|  |     def test_get_superclasses(self): | ||||||
|  |         """Ensure that the correct list of superclasses is assembled. | ||||||
|  |         """ | ||||||
|  |         class Animal(Document): pass | ||||||
|  |         class Fish(Animal): pass | ||||||
|  |         class Mammal(Animal): pass | ||||||
|  |         class Human(Mammal): pass | ||||||
|  |         class Dog(Mammal): pass | ||||||
|  |  | ||||||
|  |         mammal_superclasses = {'Animal': Animal} | ||||||
|  |         self.assertEqual(Mammal._superclasses, mammal_superclasses) | ||||||
|  |  | ||||||
|  |         dog_superclasses = { | ||||||
|  |             'Animal': Animal, | ||||||
|  |             'Animal.Mammal': Mammal, | ||||||
|  |         } | ||||||
|  |         self.assertEqual(Dog._superclasses, dog_superclasses) | ||||||
|  |  | ||||||
|  |     def test_get_subclasses(self): | ||||||
|  |         """Ensure that the correct list of subclasses is retrieved by the | ||||||
|  |         _get_subclasses method. | ||||||
|  |         """ | ||||||
|  |         class Animal(Document): pass | ||||||
|  |         class Fish(Animal): pass | ||||||
|  |         class Mammal(Animal): pass | ||||||
|  |         class Human(Mammal): pass | ||||||
|  |         class Dog(Mammal): pass | ||||||
|  |  | ||||||
|  |         mammal_subclasses = { | ||||||
|  |             'Animal.Mammal.Dog': Dog, | ||||||
|  |             'Animal.Mammal.Human': Human | ||||||
|  |         } | ||||||
|  |         self.assertEqual(Mammal._get_subclasses(), mammal_subclasses) | ||||||
|  |  | ||||||
|  |         animal_subclasses = { | ||||||
|  |             'Animal.Fish': Fish, | ||||||
|  |             'Animal.Mammal': Mammal, | ||||||
|  |             'Animal.Mammal.Dog': Dog, | ||||||
|  |             'Animal.Mammal.Human': Human | ||||||
|  |         } | ||||||
|  |         self.assertEqual(Animal._get_subclasses(), animal_subclasses) | ||||||
|  |  | ||||||
|  |     def test_polymorphic_queries(self): | ||||||
|  |         """Ensure that the correct subclasses are returned from a query""" | ||||||
|  |         class Animal(Document): pass | ||||||
|  |         class Fish(Animal): pass | ||||||
|  |         class Mammal(Animal): pass | ||||||
|  |         class Human(Mammal): pass | ||||||
|  |         class Dog(Mammal): pass | ||||||
|  |  | ||||||
|  |         Animal().save() | ||||||
|  |         Fish().save() | ||||||
|  |         Mammal().save() | ||||||
|  |         Human().save() | ||||||
|  |         Dog().save() | ||||||
|  |  | ||||||
|  |         classes = [obj.__class__ for obj in Animal.objects] | ||||||
|  |         self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog]) | ||||||
|  |  | ||||||
|  |         classes = [obj.__class__ for obj in Mammal.objects] | ||||||
|  |         self.assertEqual(classes, [Mammal, Human, Dog]) | ||||||
|  |  | ||||||
|  |         classes = [obj.__class__ for obj in Human.objects] | ||||||
|  |         self.assertEqual(classes, [Human]) | ||||||
|  |  | ||||||
|  |         Animal.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_inheritance(self): | ||||||
|  |         """Ensure that document may inherit fields from a superclass document. | ||||||
|  |         """ | ||||||
|  |         class Employee(self.Person): | ||||||
|  |             salary = IntField() | ||||||
|  |  | ||||||
|  |         self.assertTrue('name' in Employee._fields) | ||||||
|  |         self.assertTrue('salary' in Employee._fields) | ||||||
|  |         self.assertEqual(Employee._meta['collection'], | ||||||
|  |                          self.Person._meta['collection']) | ||||||
|  |  | ||||||
|  |         # Ensure that MRO error is not raised | ||||||
|  |         class A(Document): pass | ||||||
|  |         class B(A): pass | ||||||
|  |         class C(B): pass | ||||||
|  |  | ||||||
    def test_allow_inheritance(self):
        """Ensure that inheritance may be disabled on simple classes and that
        _cls and _types will not be used.
        """
        class Animal(Document):
            meta = {'allow_inheritance': False}
            name = StringField()

        Animal.drop_collection()

        # Subclassing a document that forbids inheritance must fail.
        def create_dog_class():
            class Dog(Animal):
                pass
        self.assertRaises(ValueError, create_dog_class)

        # Check that _cls etc aren't present on simple documents
        dog = Animal(name='dog')
        dog.save()
        collection = self.db[Animal._meta['collection']]
        obj = collection.find_one()
        self.assertFalse('_cls' in obj)
        self.assertFalse('_types' in obj)

        Animal.drop_collection()

        # Disabling inheritance on a subclass of an inheritable document
        # is also invalid.
        def create_employee_class():
            class Employee(self.Person):
                meta = {'allow_inheritance': False}
        self.assertRaises(ValueError, create_employee_class)

        # Test the same for embedded documents
        class Comment(EmbeddedDocument):
            content = StringField()
            meta = {'allow_inheritance': False}

        def create_special_comment():
            class SpecialComment(Comment):
                pass
        self.assertRaises(ValueError, create_special_comment)

        # Serialised embedded documents carry no class markers either.
        comment = Comment(content='test')
        self.assertFalse('_cls' in comment.to_mongo())
        self.assertFalse('_types' in comment.to_mongo())
|  |  | ||||||
|  |     def test_collection_name(self): | ||||||
|  |         """Ensure that a collection with a specified name may be used. | ||||||
|  |         """ | ||||||
|  |         collection = 'personCollTest' | ||||||
|  |         if collection in self.db.collection_names(): | ||||||
|  |             self.db.drop_collection(collection) | ||||||
|  |  | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             meta = {'collection': collection} | ||||||
|  |  | ||||||
|  |         user = Person(name="Test User") | ||||||
|  |         user.save() | ||||||
|  |         self.assertTrue(collection in self.db.collection_names()) | ||||||
|  |  | ||||||
|  |         user_obj = self.db[collection].find_one() | ||||||
|  |         self.assertEqual(user_obj['name'], "Test User") | ||||||
|  |  | ||||||
|  |         user_obj = Person.objects[0] | ||||||
|  |         self.assertEqual(user_obj.name, "Test User") | ||||||
|  |  | ||||||
|  |         Person.drop_collection() | ||||||
|  |         self.assertFalse(collection in self.db.collection_names()) | ||||||
|  |  | ||||||
    def test_inherited_collections(self):
        """Ensure that subclassed documents don't override parents' collections.
        """
        class Drink(Document):
            name = StringField()

        # Subclass with its own explicit collection name.
        class AlcoholicDrink(Drink):
            meta = {'collection': 'booze'}

        class Drinker(Document):
            # Generic reference: must resolve each target's own collection.
            drink = GenericReferenceField()

        Drink.drop_collection()
        AlcoholicDrink.drop_collection()
        Drinker.drop_collection()

        red_bull = Drink(name='Red Bull')
        red_bull.save()

        programmer = Drinker(drink=red_bull)
        programmer.save()

        beer = AlcoholicDrink(name='Beer')
        beer.save()

        real_person = Drinker(drink=beer)
        real_person.save()

        # Both references must dereference correctly, even though the two
        # drinks live in different collections.
        self.assertEqual(Drinker.objects[0].drink.name, red_bull.name)
        self.assertEqual(Drinker.objects[1].drink.name, beer.name)
|  |  | ||||||
    def test_capped_collection(self):
        """Ensure that capped collections work properly.
        """
        class Log(Document):
            date = DateTimeField(default=datetime.now)
            # max_documents/max_size make this a capped collection.
            meta = {
                'max_documents': 10,
                'max_size': 90000,
            }

        Log.drop_collection()

        # Ensure that the collection handles up to its maximum
        for i in range(10):
            Log().save()

        self.assertEqual(len(Log.objects), 10)

        # Check that extra documents don't increase the size
        # (capped collections overwrite the oldest entries).
        Log().save()
        self.assertEqual(len(Log.objects), 10)

        options = Log.objects._collection.options()
        self.assertEqual(options['capped'], True)
        self.assertEqual(options['max'], 10)
        self.assertEqual(options['size'], 90000)

        # Check that the document cannot be redefined with different options
        def recreate_log_document():
            class Log(Document):
                date = DateTimeField(default=datetime.now)
                meta = {
                    'max_documents': 11,
                }
            # Create the collection by accessing Document.objects
            Log.objects
        self.assertRaises(InvalidCollectionError, recreate_log_document)

        Log.drop_collection()
|  |  | ||||||
|  |     def test_indexes(self): | ||||||
|  |         """Ensure that indexes are used when meta[indexes] is specified. | ||||||
|  |         """ | ||||||
|  |         class BlogPost(Document): | ||||||
|  |             date = DateTimeField(db_field='addDate', default=datetime.now) | ||||||
|  |             category = StringField() | ||||||
|  |             tags = ListField(StringField()) | ||||||
|  |             meta = { | ||||||
|  |                 'indexes': [ | ||||||
|  |                     '-date', | ||||||
|  |                     'tags', | ||||||
|  |                     ('category', '-date') | ||||||
|  |                 ], | ||||||
|  |             } | ||||||
|  |  | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |         info = BlogPost.objects._collection.index_information() | ||||||
|  |         # _id, types, '-date', 'tags', ('cat', 'date') | ||||||
|  |         self.assertEqual(len(info), 5) | ||||||
|  |  | ||||||
|  |         # Indexes are lazy so use list() to perform query | ||||||
|  |         list(BlogPost.objects) | ||||||
|  |         info = BlogPost.objects._collection.index_information() | ||||||
|  |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|  |         self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)] | ||||||
|  |                         in info) | ||||||
|  |         self.assertTrue([('_types', 1), ('addDate', -1)] in info) | ||||||
|  |         # tags is a list field so it shouldn't have _types in the index | ||||||
|  |         self.assertTrue([('tags', 1)] in info) | ||||||
|  |  | ||||||
|  |         class ExtendedBlogPost(BlogPost): | ||||||
|  |             title = StringField() | ||||||
|  |             meta = {'indexes': ['title']} | ||||||
|  |  | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
|  |         list(ExtendedBlogPost.objects) | ||||||
|  |         info = ExtendedBlogPost.objects._collection.index_information() | ||||||
|  |         info = [value['key'] for key, value in info.iteritems()] | ||||||
|  |         self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)] | ||||||
|  |                         in info) | ||||||
|  |         self.assertTrue([('_types', 1), ('addDate', -1)] in info) | ||||||
|  |         self.assertTrue([('_types', 1), ('title', 1)] in info) | ||||||
|  |  | ||||||
|  |         BlogPost.drop_collection() | ||||||
|  |  | ||||||
    def test_unique(self):
        """Ensure that uniqueness constraints are applied to fields.
        """
        class BlogPost(Document):
            title = StringField()
            slug = StringField(unique=True)

        BlogPost.drop_collection()

        post1 = BlogPost(title='test1', slug='test')
        post1.save()

        # Two posts with the same slug is not allowed
        post2 = BlogPost(title='test2', slug='test')
        self.assertRaises(OperationError, post2.save)

        class Date(EmbeddedDocument):
            year = IntField(db_field='yr')

        class BlogPost(Document):
            title = StringField()
            date = EmbeddedDocumentField(Date)
            # Compound uniqueness: slug must be unique per embedded year —
            # note the dotted path into the embedded document.
            slug = StringField(unique_with='date.year')

        BlogPost.drop_collection()

        post1 = BlogPost(title='test1', date=Date(year=2009), slug='test')
        post1.save()

        # day is different so won't raise exception
        post2 = BlogPost(title='test2', date=Date(year=2010), slug='test')
        post2.save()

        # Now there will be two docs with the same slug and the same day: fail
        post3 = BlogPost(title='test3', date=Date(year=2010), slug='test')
        self.assertRaises(OperationError, post3.save)

        BlogPost.drop_collection()
|  |  | ||||||
|  |     def test_custom_id_field(self): | ||||||
|  |         """Ensure that documents may be created with custom primary keys. | ||||||
|  |         """ | ||||||
|  |         class User(Document): | ||||||
|  |             username = StringField(primary_key=True) | ||||||
|  |             name = StringField() | ||||||
|  |  | ||||||
|  |         User.drop_collection() | ||||||
|  |  | ||||||
|  |         self.assertEqual(User._fields['username'].db_field, '_id') | ||||||
|  |         self.assertEqual(User._meta['id_field'], 'username') | ||||||
|  |  | ||||||
|  |         def create_invalid_user(): | ||||||
|  |             User(name='test').save() # no primary key field | ||||||
|  |         self.assertRaises(ValidationError, create_invalid_user) | ||||||
|  |  | ||||||
|  |         def define_invalid_user(): | ||||||
|  |             class EmailUser(User): | ||||||
|  |                 email = StringField(primary_key=True) | ||||||
|  |         self.assertRaises(ValueError, define_invalid_user) | ||||||
|  |  | ||||||
|  |         class EmailUser(User): | ||||||
|  |             email = StringField() | ||||||
|  |  | ||||||
|  |         user = User(username='test', name='test user') | ||||||
|  |         user.save() | ||||||
|  |  | ||||||
|  |         user_obj = User.objects.first() | ||||||
|  |         self.assertEqual(user_obj.id, 'test') | ||||||
|  |         self.assertEqual(user_obj.pk, 'test') | ||||||
|  |  | ||||||
|  |         user_son = User.objects._collection.find_one() | ||||||
|  |         self.assertEqual(user_son['_id'], 'test') | ||||||
|  |         self.assertTrue('username' not in user_son['_id']) | ||||||
|  |  | ||||||
|  |         User.drop_collection() | ||||||
|  |  | ||||||
|  |         user = User(pk='mongo', name='mongo user') | ||||||
|  |         user.save() | ||||||
|  |  | ||||||
|  |         user_obj = User.objects.first() | ||||||
|  |         self.assertEqual(user_obj.id, 'mongo') | ||||||
|  |         self.assertEqual(user_obj.pk, 'mongo') | ||||||
|  |  | ||||||
|  |         user_son = User.objects._collection.find_one() | ||||||
|  |         self.assertEqual(user_son['_id'], 'mongo') | ||||||
|  |         self.assertTrue('username' not in user_son['_id']) | ||||||
|  |  | ||||||
|  |         User.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_creation(self): | ||||||
|  |         """Ensure that document may be created using keyword arguments. | ||||||
|  |         """ | ||||||
|  |         person = self.Person(name="Test User", age=30) | ||||||
|  |         self.assertEqual(person.name, "Test User") | ||||||
|  |         self.assertEqual(person.age, 30) | ||||||
|  |  | ||||||
|  |     def test_reload(self): | ||||||
|  |         """Ensure that attributes may be reloaded. | ||||||
|  |         """ | ||||||
|  |         person = self.Person(name="Test User", age=20) | ||||||
|  |         person.save() | ||||||
|  |  | ||||||
|  |         person_obj = self.Person.objects.first() | ||||||
|  |         person_obj.name = "Mr Test User" | ||||||
|  |         person_obj.age = 21 | ||||||
|  |         person_obj.save() | ||||||
|  |  | ||||||
|  |         self.assertEqual(person.name, "Test User") | ||||||
|  |         self.assertEqual(person.age, 20) | ||||||
|  |  | ||||||
|  |         person.reload() | ||||||
|  |         self.assertEqual(person.name, "Mr Test User") | ||||||
|  |         self.assertEqual(person.age, 21) | ||||||
|  |  | ||||||
|  |     def test_dictionary_access(self): | ||||||
|  |         """Ensure that dictionary-style field access works properly. | ||||||
|  |         """ | ||||||
|  |         person = self.Person(name='Test User', age=30) | ||||||
|  |         self.assertEquals(person['name'], 'Test User') | ||||||
|  |  | ||||||
|  |         self.assertRaises(KeyError, person.__getitem__, 'salary') | ||||||
|  |         self.assertRaises(KeyError, person.__setitem__, 'salary', 50) | ||||||
|  |  | ||||||
|  |         person['name'] = 'Another User' | ||||||
|  |         self.assertEquals(person['name'], 'Another User') | ||||||
|  |  | ||||||
|  |         # Length = length(assigned fields + id) | ||||||
|  |         self.assertEquals(len(person), 3) | ||||||
|  |  | ||||||
|  |         self.assertTrue('age' in person) | ||||||
|  |         person.age = None | ||||||
|  |         self.assertFalse('age' in person) | ||||||
|  |         self.assertFalse('nationality' in person) | ||||||
|  |  | ||||||
|  |     def test_embedded_document(self): | ||||||
|  |         """Ensure that embedded documents are set up correctly. | ||||||
|  |         """ | ||||||
|  |         class Comment(EmbeddedDocument): | ||||||
|  |             content = StringField() | ||||||
|  |  | ||||||
|  |         self.assertTrue('content' in Comment._fields) | ||||||
|  |         self.assertFalse('id' in Comment._fields) | ||||||
|  |         self.assertFalse('collection' in Comment._meta) | ||||||
|  |  | ||||||
|  |     def test_embedded_document_validation(self): | ||||||
|  |         """Ensure that embedded documents may be validated. | ||||||
|  |         """ | ||||||
|  |         class Comment(EmbeddedDocument): | ||||||
|  |             date = DateTimeField() | ||||||
|  |             content = StringField(required=True) | ||||||
|  |  | ||||||
|  |         comment = Comment() | ||||||
|  |         self.assertRaises(ValidationError, comment.validate) | ||||||
|  |  | ||||||
|  |         comment.content = 'test' | ||||||
|  |         comment.validate() | ||||||
|  |  | ||||||
|  |         comment.date = 4 | ||||||
|  |         self.assertRaises(ValidationError, comment.validate) | ||||||
|  |  | ||||||
|  |         comment.date = datetime.now() | ||||||
|  |         comment.validate() | ||||||
|  |  | ||||||
|  |     def test_save(self): | ||||||
|  |         """Ensure that a document may be saved in the database. | ||||||
|  |         """ | ||||||
|  |         # Create person object and save it to the database | ||||||
|  |         person = self.Person(name='Test User', age=30) | ||||||
|  |         person.save() | ||||||
|  |         # Ensure that the object is in the database | ||||||
|  |         collection = self.db[self.Person._meta['collection']] | ||||||
|  |         person_obj = collection.find_one({'name': 'Test User'}) | ||||||
|  |         self.assertEqual(person_obj['name'], 'Test User') | ||||||
|  |         self.assertEqual(person_obj['age'], 30) | ||||||
|  |         self.assertEqual(person_obj['_id'], person.id) | ||||||
|  |         # Test skipping validation on save | ||||||
|  |         class Recipient(Document): | ||||||
|  |             email = EmailField(required=True) | ||||||
|  |  | ||||||
|  |         recipient = Recipient(email='root@localhost') | ||||||
|  |         self.assertRaises(ValidationError, recipient.save) | ||||||
|  |         try: | ||||||
|  |             recipient.save(validate=False) | ||||||
|  |         except ValidationError: | ||||||
|  |             fail() | ||||||
|  |  | ||||||
|  |     def test_delete(self): | ||||||
|  |         """Ensure that document may be deleted using the delete method. | ||||||
|  |         """ | ||||||
|  |         person = self.Person(name="Test User", age=30) | ||||||
|  |         person.save() | ||||||
|  |         self.assertEqual(len(self.Person.objects), 1) | ||||||
|  |         person.delete() | ||||||
|  |         self.assertEqual(len(self.Person.objects), 0) | ||||||
|  |  | ||||||
|  |     def test_save_custom_id(self): | ||||||
|  |         """Ensure that a document may be saved with a custom _id. | ||||||
|  |         """ | ||||||
|  |         # Create person object and save it to the database | ||||||
|  |         person = self.Person(name='Test User', age=30, | ||||||
|  |                              id='497ce96f395f2f052a494fd4') | ||||||
|  |         person.save() | ||||||
|  |         # Ensure that the object is in the database with the correct _id | ||||||
|  |         collection = self.db[self.Person._meta['collection']] | ||||||
|  |         person_obj = collection.find_one({'name': 'Test User'}) | ||||||
|  |         self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4') | ||||||
|  |  | ||||||
    def test_save_custom_pk(self):
        """Ensure that a document may be saved with a custom _id using pk alias.
        """
        # Same as test_save_custom_id, but the id is supplied through the
        # 'pk' alias rather than the 'id' keyword.
        person = self.Person(name='Test User', age=30,
                             pk='497ce96f395f2f052a494fd4')
        person.save()
        # Ensure that the object is in the database with the correct _id,
        # checked through the raw pymongo collection.
        collection = self.db[self.Person._meta['collection']]
        person_obj = collection.find_one({'name': 'Test User'})
        self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4')
|  |  | ||||||
    def test_save_list(self):
        """Ensure that a list field may be properly saved.
        """
        class Comment(EmbeddedDocument):
            content = StringField()

        class BlogPost(Document):
            content = StringField()
            comments = ListField(EmbeddedDocumentField(Comment))
            tags = ListField(StringField())

        BlogPost.drop_collection()

        post = BlogPost(content='Went for a walk today...')
        post.tags = tags = ['fun', 'leisure']
        comments = [Comment(content='Good for you'), Comment(content='Yay.')]
        post.comments = comments
        post.save()

        # Verify the stored representation via raw pymongo: the string list
        # round-trips as-is, and each embedded Comment becomes a sub-document
        # whose 'content' matches the original.
        collection = self.db[BlogPost._meta['collection']]
        post_obj = collection.find_one()
        self.assertEqual(post_obj['tags'], tags)
        for comment_obj, comment in zip(post_obj['comments'], comments):
            self.assertEqual(comment_obj['content'], comment['content'])

        BlogPost.drop_collection()
|  |  | ||||||
    def test_save_embedded_document(self):
        """Ensure that a document with an embedded document field may be
        saved in the database.
        """
        class EmployeeDetails(EmbeddedDocument):
            position = StringField()

        # Employee subclasses the shared Person fixture, so it is stored in
        # Person's collection.
        class Employee(self.Person):
            salary = IntField()
            details = EmbeddedDocumentField(EmployeeDetails)

        # Create employee object and save it to the database
        employee = Employee(name='Test Employee', age=50, salary=20000)
        employee.details = EmployeeDetails(position='Developer')
        employee.save()

        # Ensure that the object is in the database (queried through the
        # parent class's collection, via raw pymongo)
        collection = self.db[self.Person._meta['collection']]
        employee_obj = collection.find_one({'name': 'Test Employee'})
        self.assertEqual(employee_obj['name'], 'Test Employee')
        self.assertEqual(employee_obj['age'], 50)
        # Ensure that the 'details' embedded object saved correctly
        self.assertEqual(employee_obj['details']['position'], 'Developer')
|  |  | ||||||
    def test_save_reference(self):
        """Ensure that a document reference field may be saved in the database.
        """

        class BlogPost(Document):
            meta = {'collection': 'blogpost_1'}
            content = StringField()
            author = ReferenceField(self.Person)

        BlogPost.drop_collection()

        author = self.Person(name='Test User')
        author.save()

        post = BlogPost(content='Watched some TV today... how exciting.')
        # Should only reference author when saving
        post.author = author
        post.save()

        post_obj = BlogPost.objects.first()

        # Test laziness: the raw _data dict still holds an undereferenced
        # DBRef; attribute access dereferences it into a Person instance.
        self.assertTrue(isinstance(post_obj._data['author'],
                                   bson.dbref.DBRef))
        self.assertTrue(isinstance(post_obj.author, self.Person))
        self.assertEqual(post_obj.author.name, 'Test User')

        # Ensure that the dereferenced object may be changed and saved
        post_obj.author.age = 25
        post_obj.author.save()

        # Re-fetch the author and confirm the change was persisted.
        author = list(self.Person.objects(name='Test User'))[-1]
        self.assertEqual(author.age, 25)

        BlogPost.drop_collection()
|  |  | ||||||
    def tearDown(self):
        # Remove all Person documents so tests stay isolated from each other.
        self.Person.drop_collection()
|  |  | ||||||
|  |  | ||||||
# Allow running this test module directly (python tests/document.py).
if __name__ == '__main__':
    unittest.main()
							
								
								
									
										788
									
								
								tests/fields.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										788
									
								
								tests/fields.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,788 @@ | |||||||
|  | import unittest | ||||||
|  | import datetime | ||||||
|  | from decimal import Decimal | ||||||
|  |  | ||||||
|  | import pymongo | ||||||
|  | import gridfs | ||||||
|  |  | ||||||
|  | from mongoengine import * | ||||||
|  | from mongoengine.connection import _get_db | ||||||
|  |  | ||||||
|  |  | ||||||
|  | class FieldTest(unittest.TestCase): | ||||||
|  |  | ||||||
    def setUp(self):
        # Connect first; _get_db() returns the database for the established
        # default connection.
        connect(db='mongoenginetest')
        self.db = _get_db()
|  |  | ||||||
|  |     def test_default_values(self): | ||||||
|  |         """Ensure that default field values are used when creating a document. | ||||||
|  |         """ | ||||||
|  |         class Person(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             age = IntField(default=30) | ||||||
|  |             userid = StringField(default=lambda: 'test') | ||||||
|  |  | ||||||
|  |         person = Person(name='Test Person') | ||||||
|  |         self.assertEqual(person._data['age'], 30) | ||||||
|  |         self.assertEqual(person._data['userid'], 'test') | ||||||
|  |  | ||||||
    def test_required_values(self):
        """Ensure that required field constraints are enforced.
        """
        class Person(Document):
            name = StringField(required=True)
            age = IntField(required=True)
            userid = StringField()

        # Missing 'age' fails validation.
        person = Person(name="Test User")
        self.assertRaises(ValidationError, person.validate)
        # Missing 'name' fails validation.
        person = Person(age=30)
        self.assertRaises(ValidationError, person.validate)
|  |  | ||||||
    def test_object_id_validation(self):
        """Ensure that invalid values cannot be assigned to the document's
        id field, and that a valid ObjectId string is accepted.
        """
        class Person(Document):
            name = StringField()

        person = Person(name='Test User')
        self.assertEqual(person.id, None)

        # Neither an int nor an arbitrary short string is a valid ObjectId.
        person.id = 47
        self.assertRaises(ValidationError, person.validate)

        person.id = 'abc'
        self.assertRaises(ValidationError, person.validate)

        # A 24-character hex string is a valid ObjectId representation.
        person.id = '497ce96f395f2f052a494fd4'
        person.validate()
|  |  | ||||||
    def test_string_validation(self):
        """Ensure that invalid values cannot be assigned to string fields.
        """
        class Person(Document):
            name = StringField(max_length=20)
            # First positional argument of StringField is the regex pattern.
            userid = StringField(r'[0-9a-z_]+$')

        # A non-string value fails validation.
        person = Person(name=34)
        self.assertRaises(ValidationError, person.validate)

        # Test regex validation on userid ('.' is not in the allowed set)
        person = Person(userid='test.User')
        self.assertRaises(ValidationError, person.validate)

        person.userid = 'test_user'
        self.assertEqual(person.userid, 'test_user')
        person.validate()

        # Test max length validation on name
        person = Person(name='Name that is more than twenty characters')
        self.assertRaises(ValidationError, person.validate)

        person.name = 'Shorter name'
        person.validate()
|  |  | ||||||
    def test_url_validation(self):
        """Ensure that URLFields validate urls properly.
        """
        class Link(Document):
            url = URLField()

        # A bare word without a scheme is rejected.
        link = Link()
        link.url = 'google'
        self.assertRaises(ValidationError, link.validate)

        # A full URL including an explicit port is accepted.
        link.url = 'http://www.google.com:8080'
        link.validate()
|  |          | ||||||
    def test_int_validation(self):
        """Ensure that invalid values cannot be assigned to int fields.
        """
        class Person(Document):
            age = IntField(min_value=0, max_value=110)

        person = Person()
        person.age = 50
        person.validate()

        # Below min_value, above max_value, and non-numeric all fail.
        person.age = -1
        self.assertRaises(ValidationError, person.validate)
        person.age = 120
        self.assertRaises(ValidationError, person.validate)
        person.age = 'ten'
        self.assertRaises(ValidationError, person.validate)
|  |  | ||||||
    def test_float_validation(self):
        """Ensure that invalid values cannot be assigned to float fields.
        """
        class Person(Document):
            height = FloatField(min_value=0.1, max_value=3.5)

        person = Person()
        person.height = 1.89
        person.validate()

        # A numeric string is rejected — FloatField does not coerce here.
        person.height = '2.0'
        self.assertRaises(ValidationError, person.validate)
        # Out-of-range values on both sides fail.
        person.height = 0.01
        self.assertRaises(ValidationError, person.validate)
        person.height = 4.0
        self.assertRaises(ValidationError, person.validate)
|  |          | ||||||
    def test_decimal_validation(self):
        """Ensure that invalid values cannot be assigned to decimal fields.
        """
        class Person(Document):
            height = DecimalField(min_value=Decimal('0.1'),
                                  max_value=Decimal('3.5'))

        Person.drop_collection()

        # A Decimal in range round-trips through save/reload unchanged.
        person = Person()
        person.height = Decimal('1.89')
        person.save()
        person.reload()
        self.assertEqual(person.height, Decimal('1.89'))

        # A numeric string in range is accepted by save (unlike FloatField).
        person.height = '2.0'
        person.save()
        # Out-of-range values fail whether given as float or Decimal.
        person.height = 0.01
        self.assertRaises(ValidationError, person.validate)
        person.height = Decimal('0.01')
        self.assertRaises(ValidationError, person.validate)
        person.height = Decimal('4.0')
        self.assertRaises(ValidationError, person.validate)

        Person.drop_collection()
|  |  | ||||||
    def test_boolean_validation(self):
        """Ensure that invalid values cannot be assigned to boolean fields.
        """
        class Person(Document):
            admin = BooleanField()

        person = Person()
        person.admin = True
        person.validate()

        # Truthy non-bool values (int, str) are rejected — no coercion.
        person.admin = 2
        self.assertRaises(ValidationError, person.validate)
        person.admin = 'Yes'
        self.assertRaises(ValidationError, person.validate)
|  |  | ||||||
    def test_datetime_validation(self):
        """Ensure that invalid values cannot be assigned to datetime fields.
        """
        class LogEntry(Document):
            time = DateTimeField()

        log = LogEntry()
        log.time = datetime.datetime.now()
        log.validate()

        # Neither a number nor a free-form string is a valid datetime.
        log.time = -1
        self.assertRaises(ValidationError, log.validate)
        log.time = '1pm'
        self.assertRaises(ValidationError, log.validate)
|  |  | ||||||
    def test_list_validation(self):
        """Ensure that a list field only accepts lists with valid elements.
        """
        class User(Document):
            pass

        class Comment(EmbeddedDocument):
            content = StringField()

        class BlogPost(Document):
            content = StringField()
            comments = ListField(EmbeddedDocumentField(Comment))
            tags = ListField(StringField())
            authors = ListField(ReferenceField(User))

        post = BlogPost(content='Went for a walk today...')
        post.validate()

        # A bare string and a list of non-strings both fail for tags.
        post.tags = 'fun'
        self.assertRaises(ValidationError, post.validate)
        post.tags = [1, 2]
        self.assertRaises(ValidationError, post.validate)

        # Both lists and tuples of valid elements are accepted.
        post.tags = ['fun', 'leisure']
        post.validate()
        post.tags = ('fun', 'leisure')
        post.validate()

        # comments must be a list of Comment embedded documents.
        post.comments = ['a']
        self.assertRaises(ValidationError, post.validate)
        post.comments = 'yay'
        self.assertRaises(ValidationError, post.validate)

        comments = [Comment(content='Good for you'), Comment(content='Yay.')]
        post.comments = comments
        post.validate()

        # authors must reference User documents; a Comment is the wrong type.
        post.authors = [Comment()]
        self.assertRaises(ValidationError, post.validate)

        post.authors = [User()]
        post.validate()
|  |  | ||||||
    def test_sorted_list_sorting(self):
        """Ensure that a sorted list field properly sorts values.
        """
        class Comment(EmbeddedDocument):
            order = IntField()
            content = StringField()

        class BlogPost(Document):
            content = StringField()
            # Sorted by the embedded documents' 'order' attribute.
            comments = SortedListField(EmbeddedDocumentField(Comment),
                                       ordering='order')
            # No ordering key: sorted by natural (string) order.
            tags = SortedListField(StringField())

        post = BlogPost(content='Went for a walk today...')
        post.save()

        # Tags stored out of order come back sorted after a reload.
        post.tags = ['leisure', 'fun']
        post.save()
        post.reload()
        self.assertEqual(post.tags, ['fun', 'leisure'])

        # Comments stored out of order come back sorted by 'order'.
        comment1 = Comment(content='Good for you', order=1)
        comment2 = Comment(content='Yay.', order=0)
        comments = [comment1, comment2]
        post.comments = comments
        post.save()
        post.reload()
        self.assertEqual(post.comments[0].content, comment2.content)
        self.assertEqual(post.comments[1].content, comment1.content)

        BlogPost.drop_collection()
|  |  | ||||||
    def test_dict_validation(self):
        """Ensure that dict types work as expected.
        """
        class BlogPost(Document):
            info = DictField()

        # Non-dict values are rejected.
        post = BlogPost()
        post.info = 'my post'
        self.assertRaises(ValidationError, post.validate)

        post.info = ['test', 'test']
        self.assertRaises(ValidationError, post.validate)

        # Keys starting with '$' or containing '.' are invalid in MongoDB.
        post.info = {'$title': 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {'the.title': 'test'}
        self.assertRaises(ValidationError, post.validate)

        post.info = {'title': 'test'}
        post.validate()
|  |  | ||||||
    def test_embedded_document_validation(self):
        """Ensure that invalid embedded documents cannot be assigned to
        embedded document fields.
        """
        class Comment(EmbeddedDocument):
            content = StringField()

        class PersonPreferences(EmbeddedDocument):
            food = StringField(required=True)
            number = IntField()

        class Person(Document):
            name = StringField()
            preferences = EmbeddedDocumentField(PersonPreferences)

        # A plain string is not an embedded document.
        person = Person(name='Test User')
        person.preferences = 'My Preferences'
        self.assertRaises(ValidationError, person.validate)

        # Check that only the right embedded doc works
        person.preferences = Comment(content='Nice blog post...')
        self.assertRaises(ValidationError, person.validate)

        # Right type but missing its required 'food' field — still invalid.
        person.preferences = PersonPreferences()
        self.assertRaises(ValidationError, person.validate)

        person.preferences = PersonPreferences(food='Cheese', number=47)
        self.assertEqual(person.preferences.food, 'Cheese')
        person.validate()
|  |  | ||||||
    def test_embedded_document_inheritance(self):
        """Ensure that subclasses of embedded documents may be provided to
        EmbeddedDocumentFields of the superclass' type.
        """
        class User(EmbeddedDocument):
            name = StringField()

        class PowerUser(User):
            power = IntField()

        class BlogPost(Document):
            content = StringField()
            author = EmbeddedDocumentField(User)

        # Assigning either the declared type or a subclass must not raise.
        post = BlogPost(content='What I did today...')
        post.author = User(name='Test User')
        post.author = PowerUser(name='Test User', power=47)
|  |  | ||||||
    def test_reference_validation(self):
        """Ensure that invalid docment objects cannot be assigned to reference
        fields.
        """
        class User(Document):
            name = StringField()

        class BlogPost(Document):
            content = StringField()
            author = ReferenceField(User)

        User.drop_collection()
        BlogPost.drop_collection()

        # ReferenceField itself rejects non-Document target classes.
        self.assertRaises(ValidationError, ReferenceField, EmbeddedDocument)

        user = User(name='Test User')

        # Ensure that the referenced object must have been saved
        post1 = BlogPost(content='Chips and gravy taste good.')
        post1.author = user
        self.assertRaises(ValidationError, post1.save)

        # Check that an invalid object type cannot be used
        post2 = BlogPost(content='Chips and chilli taste good.')
        post1.author = post2
        self.assertRaises(ValidationError, post1.validate)

        # Once the referenced user is saved, the post saves cleanly.
        user.save()
        post1.author = user
        post1.save()

        # Even a saved document of the wrong type fails validation.
        post2.save()
        post1.author = post2
        self.assertRaises(ValidationError, post1.validate)

        User.drop_collection()
        BlogPost.drop_collection()
|  |      | ||||||
    def test_list_item_dereference(self):
        """Ensure that DBRef items in ListFields are dereferenced.
        """
        class User(Document):
            name = StringField()

        class Group(Document):
            members = ListField(ReferenceField(User))

        User.drop_collection()
        Group.drop_collection()

        user1 = User(name='user1')
        user1.save()
        user2 = User(name='user2')
        user2.save()

        group = Group(members=[user1, user2])
        group.save()

        # Accessing list items on the reloaded group should yield full User
        # instances (with attributes), not raw DBRefs.
        group_obj = Group.objects.first()

        self.assertEqual(group_obj.members[0].name, user1.name)
        self.assertEqual(group_obj.members[1].name, user2.name)

        User.drop_collection()
        Group.drop_collection()
|  |  | ||||||
    def test_recursive_reference(self):
        """Ensure that ReferenceFields can reference their own documents.
        """
        class Employee(Document):
            name = StringField()
            # 'self' makes the field reference the owning document class.
            boss = ReferenceField('self')
            friends = ListField(ReferenceField('self'))

        bill = Employee(name='Bill Lumbergh')
        bill.save()

        michael = Employee(name='Michael Bolton')
        michael.save()

        samir = Employee(name='Samir Nagheenanajar')
        samir.save()

        friends = [michael, samir]
        peter = Employee(name='Peter Gibbons', boss=bill, friends=friends)
        peter.save()

        # Re-fetch by id and check both self-references dereference correctly.
        peter = Employee.objects.with_id(peter.id)
        self.assertEqual(peter.boss, bill)
        self.assertEqual(peter.friends, friends)
|  |  | ||||||
|  |     def test_recursive_embedding(self): | ||||||
|  |         """Ensure that EmbeddedDocumentFields can contain their own documents. | ||||||
|  |         """ | ||||||
|  |         class Tree(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             children = ListField(EmbeddedDocumentField('TreeNode')) | ||||||
|  |  | ||||||
|  |         class TreeNode(EmbeddedDocument): | ||||||
|  |             name = StringField() | ||||||
|  |             children = ListField(EmbeddedDocumentField('self')) | ||||||
|  |          | ||||||
|  |         tree = Tree(name="Tree") | ||||||
|  |  | ||||||
|  |         first_child = TreeNode(name="Child 1") | ||||||
|  |         tree.children.append(first_child) | ||||||
|  |  | ||||||
|  |         second_child = TreeNode(name="Child 2") | ||||||
|  |         first_child.children.append(second_child) | ||||||
|  |          | ||||||
|  |         third_child = TreeNode(name="Child 3") | ||||||
|  |         first_child.children.append(third_child) | ||||||
|  |  | ||||||
|  |         tree.save() | ||||||
|  |  | ||||||
|  |         tree_obj = Tree.objects.first() | ||||||
|  |         self.assertEqual(len(tree.children), 1) | ||||||
|  |         self.assertEqual(tree.children[0].name, first_child.name) | ||||||
|  |         self.assertEqual(tree.children[0].children[0].name, second_child.name) | ||||||
|  |         self.assertEqual(tree.children[0].children[1].name, third_child.name) | ||||||
|  |  | ||||||
    def test_undefined_reference(self):
        """Ensure that ReferenceFields may reference undefined Documents.
        """
        # Product references 'Company' by name before the class exists.
        class Product(Document):
            name = StringField()
            company = ReferenceField('Company')

        class Company(Document):
            name = StringField()

        ten_gen = Company(name='10gen')
        ten_gen.save()
        mongodb = Product(name='MongoDB', company=ten_gen)
        mongodb.save()

        # The forward-declared reference both queries and dereferences.
        obj = Product.objects(company=ten_gen).first()
        self.assertEqual(obj, mongodb)
        self.assertEqual(obj.company, ten_gen)
|  |  | ||||||
    def test_reference_query_conversion(self):
        """Ensure that ReferenceFields can be queried using objects and values
        of the type of the primary key of the referenced object.
        """
        class Member(Document):
            # Custom (non-ObjectId) primary key.
            user_num = IntField(primary_key=True)

        class BlogPost(Document):
            title = StringField()
            author = ReferenceField(Member)

        Member.drop_collection()
        BlogPost.drop_collection()

        m1 = Member(user_num=1)
        m1.save()
        m2 = Member(user_num=2)
        m2.save()

        post1 = BlogPost(title='post 1', author=m1)
        post1.save()

        post2 = BlogPost(title='post 2', author=m2)
        post2.save()

        # Querying by document instance must match on the custom pk.
        post = BlogPost.objects(author=m1).first()
        self.assertEqual(post.id, post1.id)

        post = BlogPost.objects(author=m2).first()
        self.assertEqual(post.id, post2.id)

        Member.drop_collection()
        BlogPost.drop_collection()
|  |          | ||||||
    def test_generic_reference(self):
        """Ensure that a GenericReferenceField properly dereferences items.
        """
        class Link(Document):
            title = StringField()
            meta = {'allow_inheritance': False}

        class Post(Document):
            title = StringField()

        class Bookmark(Document):
            # Can point at any Document type, not one fixed class.
            bookmark_object = GenericReferenceField()

        Link.drop_collection()
        Post.drop_collection()
        Bookmark.drop_collection()

        link_1 = Link(title="Pitchfork")
        link_1.save()

        post_1 = Post(title="Behind the Scenes of the Pavement Reunion")
        post_1.save()

        bm = Bookmark(bookmark_object=post_1)
        bm.save()

        # Query by the referenced object and confirm it dereferences to the
        # correct concrete type.
        bm = Bookmark.objects(bookmark_object=post_1).first()

        self.assertEqual(bm.bookmark_object, post_1)
        self.assertTrue(isinstance(bm.bookmark_object, Post))

        # Repoint the same bookmark at a document of a different class.
        bm.bookmark_object = link_1
        bm.save()

        bm = Bookmark.objects(bookmark_object=link_1).first()

        self.assertEqual(bm.bookmark_object, link_1)
        self.assertTrue(isinstance(bm.bookmark_object, Link))

        Link.drop_collection()
        Post.drop_collection()
        Bookmark.drop_collection()
|  |  | ||||||
    def test_generic_reference_list(self):
        """Ensure that a ListField properly dereferences generic references.
        """
        class Link(Document):
            title = StringField()

        class Post(Document):
            title = StringField()

        class User(Document):
            # A heterogeneous list: each element may reference any Document.
            bookmarks = ListField(GenericReferenceField())

        Link.drop_collection()
        Post.drop_collection()
        User.drop_collection()

        link_1 = Link(title="Pitchfork")
        link_1.save()

        post_1 = Post(title="Behind the Scenes of the Pavement Reunion")
        post_1.save()

        user = User(bookmarks=[post_1, link_1])
        user.save()

        # __all queries against the generic-reference list, then each item
        # dereferences to its own concrete document, order preserved.
        user = User.objects(bookmarks__all=[post_1, link_1]).first()

        self.assertEqual(user.bookmarks[0], post_1)
        self.assertEqual(user.bookmarks[1], link_1)

        Link.drop_collection()
        Post.drop_collection()
        User.drop_collection()
|  |  | ||||||
    def test_binary_fields(self):
        """Ensure that binary fields can be stored and retrieved.
        """
        class Attachment(Document):
            content_type = StringField()
            blob = BinaryField()

        # Raw byte string including NUL and high bytes, to exercise the
        # full binary round-trip.
        BLOB = '\xe6\x00\xc4\xff\x07'
        MIME_TYPE = 'application/octet-stream'

        Attachment.drop_collection()

        attachment = Attachment(content_type=MIME_TYPE, blob=BLOB)
        attachment.save()

        # Reload and verify both fields survived unchanged.
        attachment_1 = Attachment.objects().first()
        self.assertEqual(MIME_TYPE, attachment_1.content_type)
        self.assertEqual(BLOB, attachment_1.blob)

        Attachment.drop_collection()
|  |  | ||||||
|  |     def test_binary_validation(self): | ||||||
|  |         """Ensure that invalid values cannot be assigned to binary fields. | ||||||
|  |         """ | ||||||
|  |         class Attachment(Document): | ||||||
|  |             blob = BinaryField() | ||||||
|  |  | ||||||
|  |         class AttachmentRequired(Document): | ||||||
|  |             blob = BinaryField(required=True) | ||||||
|  |  | ||||||
|  |         class AttachmentSizeLimit(Document): | ||||||
|  |             blob = BinaryField(max_bytes=4) | ||||||
|  |  | ||||||
|  |         Attachment.drop_collection() | ||||||
|  |         AttachmentRequired.drop_collection() | ||||||
|  |         AttachmentSizeLimit.drop_collection() | ||||||
|  |  | ||||||
|  |         attachment = Attachment() | ||||||
|  |         attachment.validate() | ||||||
|  |         attachment.blob = 2 | ||||||
|  |         self.assertRaises(ValidationError, attachment.validate) | ||||||
|  |  | ||||||
|  |         attachment_required = AttachmentRequired() | ||||||
|  |         self.assertRaises(ValidationError, attachment_required.validate) | ||||||
|  |         attachment_required.blob = '\xe6\x00\xc4\xff\x07' | ||||||
|  |         attachment_required.validate() | ||||||
|  |  | ||||||
|  |         attachment_size_limit = AttachmentSizeLimit(blob='\xe6\x00\xc4\xff\x07') | ||||||
|  |         self.assertRaises(ValidationError, attachment_size_limit.validate) | ||||||
|  |         attachment_size_limit.blob = '\xe6\x00\xc4\xff' | ||||||
|  |         attachment_size_limit.validate() | ||||||
|  |  | ||||||
|  |         Attachment.drop_collection() | ||||||
|  |         AttachmentRequired.drop_collection() | ||||||
|  |         AttachmentSizeLimit.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_choices_validation(self): | ||||||
|  |         """Ensure that value is in a container of allowed values. | ||||||
|  |         """ | ||||||
|  |         class Shirt(Document): | ||||||
|  |             size = StringField(max_length=3, choices=('S','M','L','XL','XXL')) | ||||||
|  |  | ||||||
|  |         Shirt.drop_collection() | ||||||
|  |  | ||||||
|  |         shirt = Shirt() | ||||||
|  |         shirt.validate() | ||||||
|  |  | ||||||
|  |         shirt.size = "S" | ||||||
|  |         shirt.validate() | ||||||
|  |  | ||||||
|  |         shirt.size = "XS" | ||||||
|  |         self.assertRaises(ValidationError, shirt.validate) | ||||||
|  |  | ||||||
|  |         Shirt.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_file_fields(self): | ||||||
|  |         """Ensure that file fields can be written to and their data retrieved | ||||||
|  |         """ | ||||||
|  |         class PutFile(Document): | ||||||
|  |             file = FileField() | ||||||
|  |  | ||||||
|  |         class StreamFile(Document): | ||||||
|  |             file = FileField() | ||||||
|  |  | ||||||
|  |         class SetFile(Document): | ||||||
|  |             file = FileField() | ||||||
|  |  | ||||||
|  |         text = 'Hello, World!' | ||||||
|  |         more_text = 'Foo Bar' | ||||||
|  |         content_type = 'text/plain' | ||||||
|  |  | ||||||
|  |         PutFile.drop_collection() | ||||||
|  |         StreamFile.drop_collection() | ||||||
|  |         SetFile.drop_collection() | ||||||
|  |  | ||||||
|  |         putfile = PutFile() | ||||||
|  |         putfile.file.put(text, content_type=content_type) | ||||||
|  |         putfile.save() | ||||||
|  |         putfile.validate() | ||||||
|  |         result = PutFile.objects.first() | ||||||
|  |         self.assertTrue(putfile == result) | ||||||
|  |         self.assertEquals(result.file.read(), text) | ||||||
|  |         self.assertEquals(result.file.content_type, content_type) | ||||||
|  |         result.file.delete() # Remove file from GridFS | ||||||
|  |  | ||||||
|  |         streamfile = StreamFile() | ||||||
|  |         streamfile.file.new_file(content_type=content_type) | ||||||
|  |         streamfile.file.write(text) | ||||||
|  |         streamfile.file.write(more_text) | ||||||
|  |         streamfile.file.close() | ||||||
|  |         streamfile.save() | ||||||
|  |         streamfile.validate() | ||||||
|  |         result = StreamFile.objects.first() | ||||||
|  |         self.assertTrue(streamfile == result) | ||||||
|  |         self.assertEquals(result.file.read(), text + more_text) | ||||||
|  |         self.assertEquals(result.file.content_type, content_type) | ||||||
|  |         result.file.delete() | ||||||
|  |  | ||||||
|  |         # Ensure deleted file returns None | ||||||
|  |         self.assertTrue(result.file.read() == None) | ||||||
|  |  | ||||||
|  |         setfile = SetFile() | ||||||
|  |         setfile.file = text | ||||||
|  |         setfile.save() | ||||||
|  |         setfile.validate() | ||||||
|  |         result = SetFile.objects.first() | ||||||
|  |         self.assertTrue(setfile == result) | ||||||
|  |         self.assertEquals(result.file.read(), text) | ||||||
|  |  | ||||||
|  |         # Try replacing file with new one | ||||||
|  |         result.file.replace(more_text) | ||||||
|  |         result.save() | ||||||
|  |         result.validate() | ||||||
|  |         result = SetFile.objects.first() | ||||||
|  |         self.assertTrue(setfile == result) | ||||||
|  |         self.assertEquals(result.file.read(), more_text) | ||||||
|  |         result.file.delete()  | ||||||
|  |  | ||||||
|  |         PutFile.drop_collection() | ||||||
|  |         StreamFile.drop_collection() | ||||||
|  |         SetFile.drop_collection() | ||||||
|  |  | ||||||
|  |         # Make sure FileField is optional and not required | ||||||
|  |         class DemoFile(Document): | ||||||
|  |             file = FileField() | ||||||
|  |         d = DemoFile.objects.create() | ||||||
|  |  | ||||||
|  |     def test_file_uniqueness(self): | ||||||
|  |         """Ensure that each instance of a FileField is unique | ||||||
|  |         """ | ||||||
|  |         class TestFile(Document): | ||||||
|  |             name = StringField() | ||||||
|  |             file = FileField() | ||||||
|  |  | ||||||
|  |         # First instance | ||||||
|  |         testfile = TestFile() | ||||||
|  |         testfile.name = "Hello, World!" | ||||||
|  |         testfile.file.put('Hello, World!') | ||||||
|  |         testfile.save() | ||||||
|  |  | ||||||
|  |         # Second instance | ||||||
|  |         testfiledupe = TestFile() | ||||||
|  |         data = testfiledupe.file.read() # Should be None | ||||||
|  |  | ||||||
|  |         self.assertTrue(testfile.name != testfiledupe.name) | ||||||
|  |         self.assertTrue(testfile.file.read() != data) | ||||||
|  |  | ||||||
|  |         TestFile.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_geo_indexes(self): | ||||||
|  |         """Ensure that indexes are created automatically for GeoPointFields. | ||||||
|  |         """ | ||||||
|  |         class Event(Document): | ||||||
|  |             title = StringField() | ||||||
|  |             location = GeoPointField() | ||||||
|  |  | ||||||
|  |         Event.drop_collection() | ||||||
|  |         event = Event(title="Coltrane Motion @ Double Door", | ||||||
|  |                       location=[41.909889, -87.677137]) | ||||||
|  |         event.save() | ||||||
|  |  | ||||||
|  |         info = Event.objects._collection.index_information() | ||||||
|  |         self.assertTrue(u'location_2d' in info) | ||||||
|  |         self.assertTrue(info[u'location_2d']['key'] == [(u'location', u'2d')]) | ||||||
|  |  | ||||||
|  |         Event.drop_collection() | ||||||
|  |  | ||||||
|  |     def test_ensure_unique_default_instances(self): | ||||||
|  |         """Ensure that every field has it's own unique default instance.""" | ||||||
|  |         class D(Document): | ||||||
|  |             data = DictField() | ||||||
|  |             data2 = DictField(default=lambda: {}) | ||||||
|  |  | ||||||
|  |         d1 = D() | ||||||
|  |         d1.data['foo'] = 'bar' | ||||||
|  |         d1.data2['foo'] = 'bar' | ||||||
|  |         d2 = D() | ||||||
|  |         self.assertEqual(d2.data, {}) | ||||||
|  |         self.assertEqual(d2.data2, {}) | ||||||
|  |  | ||||||
|  | if __name__ == '__main__': | ||||||
|  |     unittest.main() | ||||||
| @@ -1,23 +0,0 @@ | |||||||
| from datetime import datetime |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class PickleEmbedded(EmbeddedDocument): |  | ||||||
|     date = DateTimeField(default=datetime.now) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class PickleTest(Document): |  | ||||||
|     number = IntField() |  | ||||||
|     string = StringField(choices=(('One', '1'), ('Two', '2'))) |  | ||||||
|     embedded = EmbeddedDocumentField(PickleEmbedded) |  | ||||||
|     lists = ListField(StringField()) |  | ||||||
|     photo = FileField() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Mixin(object): |  | ||||||
|     name = StringField() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Base(Document): |  | ||||||
|     meta = {'allow_inheritance': True} |  | ||||||
										
											Binary file not shown.
										
									
								
							| Before Width: | Height: | Size: 8.1 KiB | 
							
								
								
									
										1521
									
								
								tests/queryset.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1521
									
								
								tests/queryset.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,98 +0,0 @@ | |||||||
| import datetime |  | ||||||
| import pymongo |  | ||||||
| import unittest |  | ||||||
|  |  | ||||||
| import mongoengine.connection |  | ||||||
|  |  | ||||||
| from bson.tz_util import utc |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.connection import get_db, get_connection, ConnectionError |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ConnectionTest(unittest.TestCase): |  | ||||||
|  |  | ||||||
|     def tearDown(self): |  | ||||||
|         mongoengine.connection._connection_settings = {} |  | ||||||
|         mongoengine.connection._connections = {} |  | ||||||
|         mongoengine.connection._dbs = {} |  | ||||||
|  |  | ||||||
|     def test_connect(self): |  | ||||||
|         """Ensure that the connect() method works properly. |  | ||||||
|         """ |  | ||||||
|         connect('mongoenginetest') |  | ||||||
|  |  | ||||||
|         conn = get_connection() |  | ||||||
|         self.assertTrue(isinstance(conn, pymongo.connection.Connection)) |  | ||||||
|  |  | ||||||
|         db = get_db() |  | ||||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) |  | ||||||
|         self.assertEqual(db.name, 'mongoenginetest') |  | ||||||
|  |  | ||||||
|         connect('mongoenginetest2', alias='testdb') |  | ||||||
|         conn = get_connection('testdb') |  | ||||||
|         self.assertTrue(isinstance(conn, pymongo.connection.Connection)) |  | ||||||
|  |  | ||||||
|     def test_connect_uri(self): |  | ||||||
|         """Ensure that the connect() method works properly with uri's |  | ||||||
|         """ |  | ||||||
|         c = connect(db='mongoenginetest', alias='admin') |  | ||||||
|         c.admin.system.users.remove({}) |  | ||||||
|         c.mongoenginetest.system.users.remove({}) |  | ||||||
|  |  | ||||||
|         c.admin.add_user("admin", "password") |  | ||||||
|         c.admin.authenticate("admin", "password") |  | ||||||
|         c.mongoenginetest.add_user("username", "password") |  | ||||||
|  |  | ||||||
|         self.assertRaises(ConnectionError, connect, "testdb_uri_bad", host='mongodb://test:password@localhost') |  | ||||||
|  |  | ||||||
|         connect("testdb_uri", host='mongodb://username:password@localhost/mongoenginetest') |  | ||||||
|  |  | ||||||
|         conn = get_connection() |  | ||||||
|         self.assertTrue(isinstance(conn, pymongo.connection.Connection)) |  | ||||||
|  |  | ||||||
|         db = get_db() |  | ||||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) |  | ||||||
|         self.assertEqual(db.name, 'mongoenginetest') |  | ||||||
|  |  | ||||||
|     def test_register_connection(self): |  | ||||||
|         """Ensure that connections with different aliases may be registered. |  | ||||||
|         """ |  | ||||||
|         register_connection('testdb', 'mongoenginetest2') |  | ||||||
|  |  | ||||||
|         self.assertRaises(ConnectionError, get_connection) |  | ||||||
|         conn = get_connection('testdb') |  | ||||||
|         self.assertTrue(isinstance(conn, pymongo.connection.Connection)) |  | ||||||
|  |  | ||||||
|         db = get_db('testdb') |  | ||||||
|         self.assertTrue(isinstance(db, pymongo.database.Database)) |  | ||||||
|         self.assertEqual(db.name, 'mongoenginetest2') |  | ||||||
|  |  | ||||||
|     def test_connection_kwargs(self): |  | ||||||
|         """Ensure that connection kwargs get passed to pymongo. |  | ||||||
|         """ |  | ||||||
|         connect('mongoenginetest', alias='t1', tz_aware=True) |  | ||||||
|         conn = get_connection('t1') |  | ||||||
|  |  | ||||||
|         self.assertTrue(conn.tz_aware) |  | ||||||
|  |  | ||||||
|         connect('mongoenginetest2', alias='t2') |  | ||||||
|         conn = get_connection('t2') |  | ||||||
|         self.assertFalse(conn.tz_aware) |  | ||||||
|  |  | ||||||
|     def test_datetime(self): |  | ||||||
|         connect('mongoenginetest', tz_aware=True) |  | ||||||
|         d = datetime.datetime(2010, 5, 5, tzinfo=utc) |  | ||||||
|  |  | ||||||
|         class DateDoc(Document): |  | ||||||
|             the_date = DateTimeField(required=True) |  | ||||||
|  |  | ||||||
|         DateDoc.drop_collection() |  | ||||||
|         DateDoc(the_date=d).save() |  | ||||||
|  |  | ||||||
|         date_doc = DateDoc.objects.first() |  | ||||||
|         self.assertEqual(d, date_doc.the_date) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if __name__ == '__main__': |  | ||||||
|     unittest.main() |  | ||||||
| @@ -1,865 +0,0 @@ | |||||||
| import unittest |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.connection import get_db |  | ||||||
| from mongoengine.tests import query_counter |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class FieldTest(unittest.TestCase): |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         connect(db='mongoenginetest') |  | ||||||
|         self.db = get_db() |  | ||||||
|  |  | ||||||
|     def test_list_item_dereference(self): |  | ||||||
|         """Ensure that DBRef items in ListFields are dereferenced. |  | ||||||
|         """ |  | ||||||
|         class User(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Group(Document): |  | ||||||
|             members = ListField(ReferenceField(User)) |  | ||||||
|  |  | ||||||
|         User.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|         for i in xrange(1, 51): |  | ||||||
|             user = User(name='user %s' % i) |  | ||||||
|             user.save() |  | ||||||
|  |  | ||||||
|         group = Group(members=User.objects) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         group = Group(members=User.objects) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first().select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|         # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|             group_objs = Group.objects.select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|             for group_obj in group_objs: |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|         User.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_recursive_reference(self): |  | ||||||
|         """Ensure that ReferenceFields can reference their own documents. |  | ||||||
|         """ |  | ||||||
|         class Employee(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             boss = ReferenceField('self') |  | ||||||
|             friends = ListField(ReferenceField('self')) |  | ||||||
|  |  | ||||||
|         Employee.drop_collection() |  | ||||||
|  |  | ||||||
|         bill = Employee(name='Bill Lumbergh') |  | ||||||
|         bill.save() |  | ||||||
|  |  | ||||||
|         michael = Employee(name='Michael Bolton') |  | ||||||
|         michael.save() |  | ||||||
|  |  | ||||||
|         samir = Employee(name='Samir Nagheenanajar') |  | ||||||
|         samir.save() |  | ||||||
|  |  | ||||||
|         friends = [michael, samir] |  | ||||||
|         peter = Employee(name='Peter Gibbons', boss=bill, friends=friends) |  | ||||||
|         peter.save() |  | ||||||
|  |  | ||||||
|         Employee(name='Funky Gibbon', boss=bill, friends=friends).save() |  | ||||||
|         Employee(name='Funky Gibbon', boss=bill, friends=friends).save() |  | ||||||
|         Employee(name='Funky Gibbon', boss=bill, friends=friends).save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             peter = Employee.objects.with_id(peter.id) |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             peter.boss |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             peter.friends |  | ||||||
|             self.assertEqual(q, 3) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             peter = Employee.objects.with_id(peter.id).select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             self.assertEquals(peter.boss, bill) |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             self.assertEquals(peter.friends, friends) |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|         # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             employees = Employee.objects(boss=bill).select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             for employee in employees: |  | ||||||
|                 self.assertEquals(employee.boss, bill) |  | ||||||
|                 self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|                 self.assertEquals(employee.friends, friends) |  | ||||||
|                 self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|     def test_circular_reference(self): |  | ||||||
|         """Ensure you can handle circular references |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             relations = ListField(EmbeddedDocumentField('Relation')) |  | ||||||
|  |  | ||||||
|             def __repr__(self): |  | ||||||
|                 return "<Person: %s>" % self.name |  | ||||||
|  |  | ||||||
|         class Relation(EmbeddedDocument): |  | ||||||
|             name = StringField() |  | ||||||
|             person = ReferenceField('Person') |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|         mother = Person(name="Mother") |  | ||||||
|         daughter = Person(name="Daughter") |  | ||||||
|  |  | ||||||
|         mother.save() |  | ||||||
|         daughter.save() |  | ||||||
|  |  | ||||||
|         daughter_rel = Relation(name="Daughter", person=daughter) |  | ||||||
|         mother.relations.append(daughter_rel) |  | ||||||
|         mother.save() |  | ||||||
|  |  | ||||||
|         mother_rel = Relation(name="Daughter", person=mother) |  | ||||||
|         self_rel = Relation(name="Self", person=daughter) |  | ||||||
|         daughter.relations.append(mother_rel) |  | ||||||
|         daughter.relations.append(self_rel) |  | ||||||
|         daughter.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects()) |  | ||||||
|  |  | ||||||
|     def test_circular_reference_on_self(self): |  | ||||||
|         """Ensure you can handle circular references |  | ||||||
|         """ |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             relations = ListField(ReferenceField('self')) |  | ||||||
|  |  | ||||||
|             def __repr__(self): |  | ||||||
|                 return "<Person: %s>" % self.name |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|         mother = Person(name="Mother") |  | ||||||
|         daughter = Person(name="Daughter") |  | ||||||
|  |  | ||||||
|         mother.save() |  | ||||||
|         daughter.save() |  | ||||||
|  |  | ||||||
|         mother.relations.append(daughter) |  | ||||||
|         mother.save() |  | ||||||
|  |  | ||||||
|         daughter.relations.append(mother) |  | ||||||
|         daughter.relations.append(daughter) |  | ||||||
|         daughter.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals("[<Person: Mother>, <Person: Daughter>]", "%s" % Person.objects()) |  | ||||||
|  |  | ||||||
|     def test_circular_tree_reference(self): |  | ||||||
|         """Ensure you can handle circular references with more than one level |  | ||||||
|         """ |  | ||||||
|         class Other(EmbeddedDocument): |  | ||||||
|             name = StringField() |  | ||||||
|             friends = ListField(ReferenceField('Person')) |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             other = EmbeddedDocumentField(Other, default=lambda: Other()) |  | ||||||
|  |  | ||||||
|             def __repr__(self): |  | ||||||
|                 return "<Person: %s>" % self.name |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|         paul = Person(name="Paul") |  | ||||||
|         paul.save() |  | ||||||
|         maria = Person(name="Maria") |  | ||||||
|         maria.save() |  | ||||||
|         julia = Person(name='Julia') |  | ||||||
|         julia.save() |  | ||||||
|         anna = Person(name='Anna') |  | ||||||
|         anna.save() |  | ||||||
|  |  | ||||||
|         paul.other.friends = [maria, julia, anna] |  | ||||||
|         paul.other.name = "Paul's friends" |  | ||||||
|         paul.save() |  | ||||||
|  |  | ||||||
|         maria.other.friends = [paul, julia, anna] |  | ||||||
|         maria.other.name = "Maria's friends" |  | ||||||
|         maria.save() |  | ||||||
|  |  | ||||||
|         julia.other.friends = [paul, maria, anna] |  | ||||||
|         julia.other.name = "Julia's friends" |  | ||||||
|         julia.save() |  | ||||||
|  |  | ||||||
|         anna.other.friends = [paul, maria, julia] |  | ||||||
|         anna.other.name = "Anna's friends" |  | ||||||
|         anna.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals( |  | ||||||
|             "[<Person: Paul>, <Person: Maria>, <Person: Julia>, <Person: Anna>]", |  | ||||||
|             "%s" % Person.objects() |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_generic_reference(self): |  | ||||||
|  |  | ||||||
|         class UserA(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserB(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserC(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Group(Document): |  | ||||||
|             members = ListField(GenericReferenceField()) |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|         members = [] |  | ||||||
|         for i in xrange(1, 51): |  | ||||||
|             a = UserA(name='User A %s' % i) |  | ||||||
|             a.save() |  | ||||||
|  |  | ||||||
|             b = UserB(name='User B %s' % i) |  | ||||||
|             b.save() |  | ||||||
|  |  | ||||||
|             c = UserC(name='User C %s' % i) |  | ||||||
|             c.save() |  | ||||||
|  |  | ||||||
|             members += [a, b, c] |  | ||||||
|  |  | ||||||
|         group = Group(members=members) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         group = Group(members=members) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for m in group_obj.members: |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first().select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for m in group_obj.members: |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_objs = Group.objects.select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for group_obj in group_objs: |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 for m in group_obj.members: |  | ||||||
|                     self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_list_field_complex(self): |  | ||||||
|  |  | ||||||
|         class UserA(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserB(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserC(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Group(Document): |  | ||||||
|             members = ListField() |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|         members = [] |  | ||||||
|         for i in xrange(1, 51): |  | ||||||
|             a = UserA(name='User A %s' % i) |  | ||||||
|             a.save() |  | ||||||
|  |  | ||||||
|             b = UserB(name='User B %s' % i) |  | ||||||
|             b.save() |  | ||||||
|  |  | ||||||
|             c = UserC(name='User C %s' % i) |  | ||||||
|             c.save() |  | ||||||
|  |  | ||||||
|             members += [a, b, c] |  | ||||||
|  |  | ||||||
|         group = Group(members=members) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         group = Group(members=members) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for m in group_obj.members: |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first().select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for m in group_obj.members: |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_objs = Group.objects.select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for group_obj in group_objs: |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 for m in group_obj.members: |  | ||||||
|                     self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_map_field_reference(self): |  | ||||||
|  |  | ||||||
|         class User(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Group(Document): |  | ||||||
|             members = MapField(ReferenceField(User)) |  | ||||||
|  |  | ||||||
|         User.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|         members = [] |  | ||||||
|         for i in xrange(1, 51): |  | ||||||
|             user = User(name='user %s' % i) |  | ||||||
|             user.save() |  | ||||||
|             members.append(user) |  | ||||||
|  |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue(isinstance(m, User)) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first().select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue(isinstance(m, User)) |  | ||||||
|  |  | ||||||
|        # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_objs = Group.objects.select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             for group_obj in group_objs: |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|                 for k, m in group_obj.members.iteritems(): |  | ||||||
|                     self.assertTrue(isinstance(m, User)) |  | ||||||
|  |  | ||||||
|         User.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_dict_field(self): |  | ||||||
|  |  | ||||||
|         class UserA(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserB(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserC(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Group(Document): |  | ||||||
|             members = DictField() |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|         members = [] |  | ||||||
|         for i in xrange(1, 51): |  | ||||||
|             a = UserA(name='User A %s' % i) |  | ||||||
|             a.save() |  | ||||||
|  |  | ||||||
|             b = UserB(name='User B %s' % i) |  | ||||||
|             b.save() |  | ||||||
|  |  | ||||||
|             c = UserC(name='User C %s' % i) |  | ||||||
|             c.save() |  | ||||||
|  |  | ||||||
|             members += [a, b, c] |  | ||||||
|  |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first().select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_objs = Group.objects.select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for group_obj in group_objs: |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 for k, m in group_obj.members.iteritems(): |  | ||||||
|                     self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         Group.objects.delete() |  | ||||||
|         Group().save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|             self.assertEqual(group_obj.members, {}) |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_dict_field_no_field_inheritance(self): |  | ||||||
|  |  | ||||||
|         class UserA(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             meta = {'allow_inheritance': False} |  | ||||||
|  |  | ||||||
|         class Group(Document): |  | ||||||
|             members = DictField() |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|         members = [] |  | ||||||
|         for i in xrange(1, 51): |  | ||||||
|             a = UserA(name='User A %s' % i) |  | ||||||
|             a.save() |  | ||||||
|  |  | ||||||
|             members += [a] |  | ||||||
|  |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue(isinstance(m, UserA)) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first().select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue(isinstance(m, UserA)) |  | ||||||
|  |  | ||||||
|         # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_objs = Group.objects.select_related() |  | ||||||
|             self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|             for group_obj in group_objs: |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 2) |  | ||||||
|  |  | ||||||
|                 for k, m in group_obj.members.iteritems(): |  | ||||||
|                     self.assertTrue(isinstance(m, UserA)) |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_generic_reference_map_field(self): |  | ||||||
|  |  | ||||||
|         class UserA(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserB(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class UserC(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         class Group(Document): |  | ||||||
|             members = MapField(GenericReferenceField()) |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|         members = [] |  | ||||||
|         for i in xrange(1, 51): |  | ||||||
|             a = UserA(name='User A %s' % i) |  | ||||||
|             a.save() |  | ||||||
|  |  | ||||||
|             b = UserB(name='User B %s' % i) |  | ||||||
|             b.save() |  | ||||||
|  |  | ||||||
|             c = UserC(name='User C %s' % i) |  | ||||||
|             c.save() |  | ||||||
|  |  | ||||||
|             members += [a, b, c] |  | ||||||
|  |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|         group = Group(members=dict([(str(u.id), u) for u in members])) |  | ||||||
|         group.save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Document select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first().select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for k, m in group_obj.members.iteritems(): |  | ||||||
|                 self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         # Queryset select_related |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_objs = Group.objects.select_related() |  | ||||||
|             self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|             for group_obj in group_objs: |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 [m for m in group_obj.members] |  | ||||||
|                 self.assertEqual(q, 4) |  | ||||||
|  |  | ||||||
|                 for k, m in group_obj.members.iteritems(): |  | ||||||
|                     self.assertTrue('User' in m.__class__.__name__) |  | ||||||
|  |  | ||||||
|         Group.objects.delete() |  | ||||||
|         Group().save() |  | ||||||
|  |  | ||||||
|         with query_counter() as q: |  | ||||||
|             self.assertEqual(q, 0) |  | ||||||
|  |  | ||||||
|             group_obj = Group.objects.first() |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|             [m for m in group_obj.members] |  | ||||||
|             self.assertEqual(q, 1) |  | ||||||
|  |  | ||||||
|         UserA.drop_collection() |  | ||||||
|         UserB.drop_collection() |  | ||||||
|         UserC.drop_collection() |  | ||||||
|         Group.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_multidirectional_lists(self): |  | ||||||
|  |  | ||||||
|         class Asset(Document): |  | ||||||
|             name = StringField(max_length=250, required=True) |  | ||||||
|             parent = GenericReferenceField(default=None) |  | ||||||
|             parents = ListField(GenericReferenceField()) |  | ||||||
|             children = ListField(GenericReferenceField()) |  | ||||||
|  |  | ||||||
|         Asset.drop_collection() |  | ||||||
|  |  | ||||||
|         root = Asset(name='', path="/", title="Site Root") |  | ||||||
|         root.save() |  | ||||||
|  |  | ||||||
|         company = Asset(name='company', title='Company', parent=root, parents=[root]) |  | ||||||
|         company.save() |  | ||||||
|  |  | ||||||
|         root.children = [company] |  | ||||||
|         root.save() |  | ||||||
|  |  | ||||||
|         root = root.reload() |  | ||||||
|         self.assertEquals(root.children, [company]) |  | ||||||
|         self.assertEquals(company.parents, [root]) |  | ||||||
|  |  | ||||||
|     def test_dict_in_dbref_instance(self): |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField(max_length=250, required=True) |  | ||||||
|  |  | ||||||
|         class Room(Document): |  | ||||||
|             number = StringField(max_length=250, required=True) |  | ||||||
|             staffs_with_position = ListField(DictField()) |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|         Room.drop_collection() |  | ||||||
|  |  | ||||||
|         bob = Person.objects.create(name='Bob') |  | ||||||
|         bob.save() |  | ||||||
|         sarah = Person.objects.create(name='Sarah') |  | ||||||
|         sarah.save() |  | ||||||
|  |  | ||||||
|         room_101 = Room.objects.create(number="101") |  | ||||||
|         room_101.staffs_with_position = [ |  | ||||||
|             {'position_key': 'window', 'staff': sarah}, |  | ||||||
|             {'position_key': 'door', 'staff': bob.to_dbref()}] |  | ||||||
|         room_101.save() |  | ||||||
|  |  | ||||||
|         room = Room.objects.first().select_related() |  | ||||||
|         self.assertEquals(room.staffs_with_position[0]['staff'], sarah) |  | ||||||
|         self.assertEquals(room.staffs_with_position[1]['staff'], bob) |  | ||||||
|  |  | ||||||
|     def test_document_reload_no_inheritance(self): |  | ||||||
|         class Foo(Document): |  | ||||||
|             meta = {'allow_inheritance': False} |  | ||||||
|             bar = ReferenceField('Bar') |  | ||||||
|             baz = ReferenceField('Baz') |  | ||||||
|  |  | ||||||
|         class Bar(Document): |  | ||||||
|             meta = {'allow_inheritance': False} |  | ||||||
|             msg = StringField(required=True, default='Blammo!') |  | ||||||
|  |  | ||||||
|         class Baz(Document): |  | ||||||
|             meta = {'allow_inheritance': False} |  | ||||||
|             msg = StringField(required=True, default='Kaboom!') |  | ||||||
|  |  | ||||||
|         Foo.drop_collection() |  | ||||||
|         Bar.drop_collection() |  | ||||||
|         Baz.drop_collection() |  | ||||||
|  |  | ||||||
|         bar = Bar() |  | ||||||
|         bar.save() |  | ||||||
|         baz = Baz() |  | ||||||
|         baz.save() |  | ||||||
|         foo = Foo() |  | ||||||
|         foo.bar = bar |  | ||||||
|         foo.baz = baz |  | ||||||
|         foo.save() |  | ||||||
|         foo.reload() |  | ||||||
|  |  | ||||||
|         self.assertEquals(type(foo.bar), Bar) |  | ||||||
|         self.assertEquals(type(foo.baz), Baz) |  | ||||||
|  |  | ||||||
|     def test_list_lookup_not_checked_in_map(self): |  | ||||||
|         """Ensure we dereference list data correctly |  | ||||||
|         """ |  | ||||||
|         class Comment(Document): |  | ||||||
|             id = IntField(primary_key=True) |  | ||||||
|             text = StringField() |  | ||||||
|  |  | ||||||
|         class Message(Document): |  | ||||||
|             id = IntField(primary_key=True) |  | ||||||
|             comments = ListField(ReferenceField(Comment)) |  | ||||||
|  |  | ||||||
|         Comment.drop_collection() |  | ||||||
|         Message.drop_collection() |  | ||||||
|  |  | ||||||
|         c1 = Comment(id=0, text='zero').save() |  | ||||||
|         c2 = Comment(id=1, text='one').save() |  | ||||||
|         Message(id=1, comments=[c1, c2]).save() |  | ||||||
|  |  | ||||||
|         msg = Message.objects.get(id=1) |  | ||||||
|         self.assertEqual(0, msg.comments[0].id) |  | ||||||
|         self.assertEqual(1, msg.comments[1].id) |  | ||||||
| @@ -1,110 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
|  |  | ||||||
| import unittest |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.django.shortcuts import get_document_or_404 |  | ||||||
|  |  | ||||||
| from django.http import Http404 |  | ||||||
| from django.template import Context, Template |  | ||||||
| from django.conf import settings |  | ||||||
| from django.core.paginator import Paginator |  | ||||||
|  |  | ||||||
| settings.configure() |  | ||||||
|  |  | ||||||
| from django.contrib.sessions.tests import SessionTestsMixin |  | ||||||
| from mongoengine.django.sessions import SessionStore, MongoSession |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class QuerySetTest(unittest.TestCase): |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         connect(db='mongoenginetest') |  | ||||||
|  |  | ||||||
|         class Person(Document): |  | ||||||
|             name = StringField() |  | ||||||
|             age = IntField() |  | ||||||
|         self.Person = Person |  | ||||||
|  |  | ||||||
|     def test_order_by_in_django_template(self): |  | ||||||
|         """Ensure that QuerySets are properly ordered in Django template. |  | ||||||
|         """ |  | ||||||
|         self.Person.drop_collection() |  | ||||||
|  |  | ||||||
|         self.Person(name="A", age=20).save() |  | ||||||
|         self.Person(name="D", age=10).save() |  | ||||||
|         self.Person(name="B", age=40).save() |  | ||||||
|         self.Person(name="C", age=30).save() |  | ||||||
|  |  | ||||||
|         t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}") |  | ||||||
|  |  | ||||||
|         d = {"ol": self.Person.objects.order_by('-name')} |  | ||||||
|         self.assertEqual(t.render(Context(d)), u'D-10:C-30:B-40:A-20:') |  | ||||||
|         d = {"ol": self.Person.objects.order_by('+name')} |  | ||||||
|         self.assertEqual(t.render(Context(d)), u'A-20:B-40:C-30:D-10:') |  | ||||||
|         d = {"ol": self.Person.objects.order_by('-age')} |  | ||||||
|         self.assertEqual(t.render(Context(d)), u'B-40:C-30:A-20:D-10:') |  | ||||||
|         d = {"ol": self.Person.objects.order_by('+age')} |  | ||||||
|         self.assertEqual(t.render(Context(d)), u'D-10:A-20:C-30:B-40:') |  | ||||||
|  |  | ||||||
|         self.Person.drop_collection() |  | ||||||
|  |  | ||||||
|     def test_q_object_filter_in_template(self): |  | ||||||
|  |  | ||||||
|         self.Person.drop_collection() |  | ||||||
|  |  | ||||||
|         self.Person(name="A", age=20).save() |  | ||||||
|         self.Person(name="D", age=10).save() |  | ||||||
|         self.Person(name="B", age=40).save() |  | ||||||
|         self.Person(name="C", age=30).save() |  | ||||||
|  |  | ||||||
|         t = Template("{% for o in ol %}{{ o.name }}-{{ o.age }}:{% endfor %}") |  | ||||||
|  |  | ||||||
|         d = {"ol": self.Person.objects.filter(Q(age=10) | Q(name="C"))} |  | ||||||
|         self.assertEqual(t.render(Context(d)), 'D-10:C-30:') |  | ||||||
|  |  | ||||||
|         # Check double rendering doesn't throw an error |  | ||||||
|         self.assertEqual(t.render(Context(d)), 'D-10:C-30:') |  | ||||||
|  |  | ||||||
|     def test_get_document_or_404(self): |  | ||||||
|         p = self.Person(name="G404") |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         self.assertRaises(Http404, get_document_or_404, self.Person, pk='1234') |  | ||||||
|         self.assertEqual(p, get_document_or_404(self.Person, pk=p.pk)) |  | ||||||
|  |  | ||||||
|     def test_pagination(self): |  | ||||||
|         """Ensure that Pagination works as expected |  | ||||||
|         """ |  | ||||||
|         class Page(Document): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         Page.drop_collection() |  | ||||||
|  |  | ||||||
|         for i in xrange(1, 11): |  | ||||||
|             Page(name=str(i)).save() |  | ||||||
|  |  | ||||||
|         paginator = Paginator(Page.objects.all(), 2) |  | ||||||
|  |  | ||||||
|         t = Template("{% for i in page.object_list  %}{{ i.name }}:{% endfor %}") |  | ||||||
|         for p in paginator.page_range: |  | ||||||
|             d = {"page": paginator.page(p)} |  | ||||||
|             end = p * 2 |  | ||||||
|             start = end - 1 |  | ||||||
|             self.assertEqual(t.render(Context(d)), u'%d:%d:' % (start, end)) |  | ||||||
|  |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class MongoDBSessionTest(SessionTestsMixin, unittest.TestCase): |  | ||||||
|     backend = SessionStore |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         connect(db='mongoenginetest') |  | ||||||
|         MongoSession.drop_collection() |  | ||||||
|         super(MongoDBSessionTest, self).setUp() |  | ||||||
|  |  | ||||||
|     def test_first_save(self): |  | ||||||
|         session = SessionStore() |  | ||||||
|         session['test'] = True |  | ||||||
|         session.save() |  | ||||||
|         self.assertTrue('test' in session) |  | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,502 +0,0 @@ | |||||||
| import unittest |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.connection import get_db |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class DynamicDocTest(unittest.TestCase): |  | ||||||
|  |  | ||||||
|     def setUp(self): |  | ||||||
|         connect(db='mongoenginetest') |  | ||||||
|         self.db = get_db() |  | ||||||
|  |  | ||||||
|         class Person(DynamicDocument): |  | ||||||
|             name = StringField() |  | ||||||
|             meta = {'allow_inheritance': True} |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|  |  | ||||||
|         self.Person = Person |  | ||||||
|  |  | ||||||
|     def test_simple_dynamic_document(self): |  | ||||||
|         """Ensures simple dynamic documents are saved correctly""" |  | ||||||
|  |  | ||||||
|         p = self.Person() |  | ||||||
|         p.name = "James" |  | ||||||
|         p.age = 34 |  | ||||||
|  |  | ||||||
|         self.assertEquals(p.to_mongo(), |  | ||||||
|             {"_types": ["Person"], "_cls": "Person", |  | ||||||
|              "name": "James", "age": 34} |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals(self.Person.objects.first().age, 34) |  | ||||||
|  |  | ||||||
|         # Confirm no changes to self.Person |  | ||||||
|         self.assertFalse(hasattr(self.Person, 'age')) |  | ||||||
|  |  | ||||||
|     def test_dynamic_document_delta(self): |  | ||||||
|         """Ensures simple dynamic documents can delta correctly""" |  | ||||||
|         p = self.Person(name="James", age=34) |  | ||||||
|         self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {})) |  | ||||||
|  |  | ||||||
|         p.doc = 123 |  | ||||||
|         del(p.doc) |  | ||||||
|         self.assertEquals(p._delta(), ({'_types': ['Person'], 'age': 34, 'name': 'James', '_cls': 'Person'}, {'doc': 1})) |  | ||||||
|  |  | ||||||
|     def test_change_scope_of_variable(self): |  | ||||||
|         """Test changing the scope of a dynamic field has no adverse effects""" |  | ||||||
|         p = self.Person() |  | ||||||
|         p.name = "Dean" |  | ||||||
|         p.misc = 22 |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         p = self.Person.objects.get() |  | ||||||
|         p.misc = {'hello': 'world'} |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         p = self.Person.objects.get() |  | ||||||
|         self.assertEquals(p.misc, {'hello': 'world'}) |  | ||||||
|  |  | ||||||
|     def test_delete_dynamic_field(self): |  | ||||||
|         """Test deleting a dynamic field works""" |  | ||||||
|         self.Person.drop_collection() |  | ||||||
|         p = self.Person() |  | ||||||
|         p.name = "Dean" |  | ||||||
|         p.misc = 22 |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         p = self.Person.objects.get() |  | ||||||
|         p.misc = {'hello': 'world'} |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         p = self.Person.objects.get() |  | ||||||
|         self.assertEquals(p.misc, {'hello': 'world'}) |  | ||||||
|         collection = self.db[self.Person._get_collection_name()] |  | ||||||
|         obj = collection.find_one() |  | ||||||
|         self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'misc', 'name']) |  | ||||||
|  |  | ||||||
|         del(p.misc) |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         p = self.Person.objects.get() |  | ||||||
|         self.assertFalse(hasattr(p, 'misc')) |  | ||||||
|  |  | ||||||
|         obj = collection.find_one() |  | ||||||
|         self.assertEquals(sorted(obj.keys()), ['_cls', '_id', '_types', 'name']) |  | ||||||
|  |  | ||||||
|     def test_dynamic_document_queries(self): |  | ||||||
|         """Ensure we can query dynamic fields""" |  | ||||||
|         p = self.Person() |  | ||||||
|         p.name = "Dean" |  | ||||||
|         p.age = 22 |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals(1, self.Person.objects(age=22).count()) |  | ||||||
|         p = self.Person.objects(age=22) |  | ||||||
|         p = p.get() |  | ||||||
|         self.assertEquals(22, p.age) |  | ||||||
|  |  | ||||||
|     def test_complex_dynamic_document_queries(self): |  | ||||||
|         class Person(DynamicDocument): |  | ||||||
|             name = StringField() |  | ||||||
|  |  | ||||||
|         Person.drop_collection() |  | ||||||
|  |  | ||||||
|         p = Person(name="test") |  | ||||||
|         p.age = "ten" |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         p1 = Person(name="test1") |  | ||||||
|         p1.age = "less then ten and a half" |  | ||||||
|         p1.save() |  | ||||||
|  |  | ||||||
|         p2 = Person(name="test2") |  | ||||||
|         p2.age = 10 |  | ||||||
|         p2.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals(Person.objects(age__icontains='ten').count(), 2) |  | ||||||
|         self.assertEquals(Person.objects(age__gte=10).count(), 1) |  | ||||||
|  |  | ||||||
|     def test_complex_data_lookups(self): |  | ||||||
|         """Ensure you can query dynamic document dynamic fields""" |  | ||||||
|         p = self.Person() |  | ||||||
|         p.misc = {'hello': 'world'} |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals(1, self.Person.objects(misc__hello='world').count()) |  | ||||||
|  |  | ||||||
|     def test_inheritance(self): |  | ||||||
|         """Ensure that dynamic document plays nice with inheritance""" |  | ||||||
|         class Employee(self.Person): |  | ||||||
|             salary = IntField() |  | ||||||
|  |  | ||||||
|         Employee.drop_collection() |  | ||||||
|  |  | ||||||
|         self.assertTrue('name' in Employee._fields) |  | ||||||
|         self.assertTrue('salary' in Employee._fields) |  | ||||||
|         self.assertEqual(Employee._get_collection_name(), |  | ||||||
|                          self.Person._get_collection_name()) |  | ||||||
|  |  | ||||||
|         joe_bloggs = Employee() |  | ||||||
|         joe_bloggs.name = "Joe Bloggs" |  | ||||||
|         joe_bloggs.salary = 10 |  | ||||||
|         joe_bloggs.age = 20 |  | ||||||
|         joe_bloggs.save() |  | ||||||
|  |  | ||||||
|         self.assertEquals(1, self.Person.objects(age=20).count()) |  | ||||||
|         self.assertEquals(1, Employee.objects(age=20).count()) |  | ||||||
|  |  | ||||||
|         joe_bloggs = self.Person.objects.first() |  | ||||||
|         self.assertTrue(isinstance(joe_bloggs, Employee)) |  | ||||||
|  |  | ||||||
|     def test_embedded_dynamic_document(self): |  | ||||||
|         """Test dynamic embedded documents""" |  | ||||||
|         class Embedded(DynamicEmbeddedDocument): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         class Doc(DynamicDocument): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         Doc.drop_collection() |  | ||||||
|         doc = Doc() |  | ||||||
|  |  | ||||||
|         embedded_1 = Embedded() |  | ||||||
|         embedded_1.string_field = 'hello' |  | ||||||
|         embedded_1.int_field = 1 |  | ||||||
|         embedded_1.dict_field = {'hello': 'world'} |  | ||||||
|         embedded_1.list_field = ['1', 2, {'hello': 'world'}] |  | ||||||
|         doc.embedded_field = embedded_1 |  | ||||||
|  |  | ||||||
|         self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", |  | ||||||
|             "embedded_field": { |  | ||||||
|                 "_types": ['Embedded'], "_cls": "Embedded", |  | ||||||
|                 "string_field": "hello", |  | ||||||
|                 "int_field": 1, |  | ||||||
|                 "dict_field": {"hello": "world"}, |  | ||||||
|                 "list_field": ['1', 2, {'hello': 'world'}] |  | ||||||
|             } |  | ||||||
|         }) |  | ||||||
|         doc.save() |  | ||||||
|  |  | ||||||
|         doc = Doc.objects.first() |  | ||||||
|         self.assertEquals(doc.embedded_field.__class__, Embedded) |  | ||||||
|         self.assertEquals(doc.embedded_field.string_field, "hello") |  | ||||||
|         self.assertEquals(doc.embedded_field.int_field, 1) |  | ||||||
|         self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'}) |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field, ['1', 2, {'hello': 'world'}]) |  | ||||||
|  |  | ||||||
|     def test_complex_embedded_documents(self): |  | ||||||
|         """Test complex dynamic embedded documents setups""" |  | ||||||
|         class Embedded(DynamicEmbeddedDocument): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         class Doc(DynamicDocument): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         Doc.drop_collection() |  | ||||||
|         doc = Doc() |  | ||||||
|  |  | ||||||
|         embedded_1 = Embedded() |  | ||||||
|         embedded_1.string_field = 'hello' |  | ||||||
|         embedded_1.int_field = 1 |  | ||||||
|         embedded_1.dict_field = {'hello': 'world'} |  | ||||||
|  |  | ||||||
|         embedded_2 = Embedded() |  | ||||||
|         embedded_2.string_field = 'hello' |  | ||||||
|         embedded_2.int_field = 1 |  | ||||||
|         embedded_2.dict_field = {'hello': 'world'} |  | ||||||
|         embedded_2.list_field = ['1', 2, {'hello': 'world'}] |  | ||||||
|  |  | ||||||
|         embedded_1.list_field = ['1', 2, embedded_2] |  | ||||||
|         doc.embedded_field = embedded_1 |  | ||||||
|  |  | ||||||
|         self.assertEquals(doc.to_mongo(), {"_types": ['Doc'], "_cls": "Doc", |  | ||||||
|             "embedded_field": { |  | ||||||
|                 "_types": ['Embedded'], "_cls": "Embedded", |  | ||||||
|                 "string_field": "hello", |  | ||||||
|                 "int_field": 1, |  | ||||||
|                 "dict_field": {"hello": "world"}, |  | ||||||
|                 "list_field": ['1', 2, |  | ||||||
|                     {"_types": ['Embedded'], "_cls": "Embedded", |  | ||||||
|                     "string_field": "hello", |  | ||||||
|                     "int_field": 1, |  | ||||||
|                     "dict_field": {"hello": "world"}, |  | ||||||
|                     "list_field": ['1', 2, {'hello': 'world'}]} |  | ||||||
|                 ] |  | ||||||
|             } |  | ||||||
|         }) |  | ||||||
|         doc.save() |  | ||||||
|         doc = Doc.objects.first() |  | ||||||
|         self.assertEquals(doc.embedded_field.__class__, Embedded) |  | ||||||
|         self.assertEquals(doc.embedded_field.string_field, "hello") |  | ||||||
|         self.assertEquals(doc.embedded_field.int_field, 1) |  | ||||||
|         self.assertEquals(doc.embedded_field.dict_field, {'hello': 'world'}) |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[0], '1') |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[1], 2) |  | ||||||
|  |  | ||||||
|         embedded_field = doc.embedded_field.list_field[2] |  | ||||||
|  |  | ||||||
|         self.assertEquals(embedded_field.__class__, Embedded) |  | ||||||
|         self.assertEquals(embedded_field.string_field, "hello") |  | ||||||
|         self.assertEquals(embedded_field.int_field, 1) |  | ||||||
|         self.assertEquals(embedded_field.dict_field, {'hello': 'world'}) |  | ||||||
|         self.assertEquals(embedded_field.list_field, ['1', 2, {'hello': 'world'}]) |  | ||||||
|  |  | ||||||
|     def test_delta_for_dynamic_documents(self): |  | ||||||
|         p = self.Person() |  | ||||||
|         p.name = "Dean" |  | ||||||
|         p.age = 22 |  | ||||||
|         p.save() |  | ||||||
|  |  | ||||||
|         p.age = 24 |  | ||||||
|         self.assertEquals(p.age, 24) |  | ||||||
|         self.assertEquals(p._get_changed_fields(), ['age']) |  | ||||||
|         self.assertEquals(p._delta(), ({'age': 24}, {})) |  | ||||||
|  |  | ||||||
|         p = self.Person.objects(age=22).get() |  | ||||||
|         p.age = 24 |  | ||||||
|         self.assertEquals(p.age, 24) |  | ||||||
|         self.assertEquals(p._get_changed_fields(), ['age']) |  | ||||||
|         self.assertEquals(p._delta(), ({'age': 24}, {})) |  | ||||||
|  |  | ||||||
|         p.save() |  | ||||||
|         self.assertEquals(1, self.Person.objects(age=24).count()) |  | ||||||
|  |  | ||||||
|     def test_delta(self): |  | ||||||
|  |  | ||||||
|         class Doc(DynamicDocument): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         Doc.drop_collection() |  | ||||||
|         doc = Doc() |  | ||||||
|         doc.save() |  | ||||||
|  |  | ||||||
|         doc = Doc.objects.first() |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), []) |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {})) |  | ||||||
|  |  | ||||||
|         doc.string_field = 'hello' |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['string_field']) |  | ||||||
|         self.assertEquals(doc._delta(), ({'string_field': 'hello'}, {})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         doc.int_field = 1 |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['int_field']) |  | ||||||
|         self.assertEquals(doc._delta(), ({'int_field': 1}, {})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         dict_value = {'hello': 'world', 'ping': 'pong'} |  | ||||||
|         doc.dict_field = dict_value |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['dict_field']) |  | ||||||
|         self.assertEquals(doc._delta(), ({'dict_field': dict_value}, {})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         list_value = ['1', 2, {'hello': 'world'}] |  | ||||||
|         doc.list_field = list_value |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['list_field']) |  | ||||||
|         self.assertEquals(doc._delta(), ({'list_field': list_value}, {})) |  | ||||||
|  |  | ||||||
|         # Test unsetting |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         doc.dict_field = {} |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['dict_field']) |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {'dict_field': 1})) |  | ||||||
|  |  | ||||||
|         doc._changed_fields = [] |  | ||||||
|         doc.list_field = [] |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['list_field']) |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {'list_field': 1})) |  | ||||||
|  |  | ||||||
|     def test_delta_recursive(self): |  | ||||||
|         """Testing deltaing works with dynamic documents""" |  | ||||||
|         class Embedded(DynamicEmbeddedDocument): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         class Doc(DynamicDocument): |  | ||||||
|             pass |  | ||||||
|  |  | ||||||
|         Doc.drop_collection() |  | ||||||
|         doc = Doc() |  | ||||||
|         doc.save() |  | ||||||
|  |  | ||||||
|         doc = Doc.objects.first() |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), []) |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {})) |  | ||||||
|  |  | ||||||
|         embedded_1 = Embedded() |  | ||||||
|         embedded_1.string_field = 'hello' |  | ||||||
|         embedded_1.int_field = 1 |  | ||||||
|         embedded_1.dict_field = {'hello': 'world'} |  | ||||||
|         embedded_1.list_field = ['1', 2, {'hello': 'world'}] |  | ||||||
|         doc.embedded_field = embedded_1 |  | ||||||
|  |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field']) |  | ||||||
|  |  | ||||||
|         embedded_delta = { |  | ||||||
|             'string_field': 'hello', |  | ||||||
|             'int_field': 1, |  | ||||||
|             'dict_field': {'hello': 'world'}, |  | ||||||
|             'list_field': ['1', 2, {'hello': 'world'}] |  | ||||||
|         } |  | ||||||
|         self.assertEquals(doc.embedded_field._delta(), (embedded_delta, {})) |  | ||||||
|         embedded_delta.update({ |  | ||||||
|             '_types': ['Embedded'], |  | ||||||
|             '_cls': 'Embedded', |  | ||||||
|         }) |  | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field': embedded_delta}, {})) |  | ||||||
|  |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         doc.embedded_field.dict_field = {} |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.dict_field']) |  | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({}, {'dict_field': 1})) |  | ||||||
|  |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {'embedded_field.dict_field': 1})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field = [] |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) |  | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({}, {'list_field': 1})) |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field': 1})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         embedded_2 = Embedded() |  | ||||||
|         embedded_2.string_field = 'hello' |  | ||||||
|         embedded_2.int_field = 1 |  | ||||||
|         embedded_2.dict_field = {'hello': 'world'} |  | ||||||
|         embedded_2.list_field = ['1', 2, {'hello': 'world'}] |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field = ['1', 2, embedded_2] |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) |  | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({ |  | ||||||
|             'list_field': ['1', 2, { |  | ||||||
|                 '_cls': 'Embedded', |  | ||||||
|                 '_types': ['Embedded'], |  | ||||||
|                 'string_field': 'hello', |  | ||||||
|                 'dict_field': {'hello': 'world'}, |  | ||||||
|                 'int_field': 1, |  | ||||||
|                 'list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|             }] |  | ||||||
|         }, {})) |  | ||||||
|  |  | ||||||
|         self.assertEquals(doc._delta(), ({ |  | ||||||
|             'embedded_field.list_field': ['1', 2, { |  | ||||||
|                 '_cls': 'Embedded', |  | ||||||
|                  '_types': ['Embedded'], |  | ||||||
|                  'string_field': 'hello', |  | ||||||
|                  'dict_field': {'hello': 'world'}, |  | ||||||
|                  'int_field': 1, |  | ||||||
|                  'list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|             }] |  | ||||||
|         }, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2]._changed_fields, []) |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[0], '1') |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[1], 2) |  | ||||||
|         for k in doc.embedded_field.list_field[2]._fields: |  | ||||||
|             self.assertEquals(doc.embedded_field.list_field[2][k], embedded_2[k]) |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].string_field = 'world' |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field.2.string_field']) |  | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({'list_field.2.string_field': 'world'}, {})) |  | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.string_field': 'world'}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2].string_field, 'world') |  | ||||||
|  |  | ||||||
|         # Test multiple assignments |  | ||||||
|         doc.embedded_field.list_field[2].string_field = 'hello world' |  | ||||||
|         doc.embedded_field.list_field[2] = doc.embedded_field.list_field[2] |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['embedded_field.list_field']) |  | ||||||
|         self.assertEquals(doc.embedded_field._delta(), ({ |  | ||||||
|             'list_field': ['1', 2, { |  | ||||||
|             '_types': ['Embedded'], |  | ||||||
|             '_cls': 'Embedded', |  | ||||||
|             'string_field': 'hello world', |  | ||||||
|             'int_field': 1, |  | ||||||
|             'list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|             'dict_field': {'hello': 'world'}}]}, {})) |  | ||||||
|         self.assertEquals(doc._delta(), ({ |  | ||||||
|             'embedded_field.list_field': ['1', 2, { |  | ||||||
|                 '_types': ['Embedded'], |  | ||||||
|                 '_cls': 'Embedded', |  | ||||||
|                 'string_field': 'hello world', |  | ||||||
|                 'int_field': 1, |  | ||||||
|                 'list_field': ['1', 2, {'hello': 'world'}], |  | ||||||
|                 'dict_field': {'hello': 'world'}} |  | ||||||
|             ]}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2].string_field, 'hello world') |  | ||||||
|  |  | ||||||
|         # Test list native methods |  | ||||||
|         doc.embedded_field.list_field[2].list_field.pop(0) |  | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}]}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].list_field.append(1) |  | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [2, {'hello': 'world'}, 1]}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2].list_field, [2, {'hello': 'world'}, 1]) |  | ||||||
|  |  | ||||||
|         doc.embedded_field.list_field[2].list_field.sort() |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|         self.assertEquals(doc.embedded_field.list_field[2].list_field, [1, 2, {'hello': 'world'}]) |  | ||||||
|  |  | ||||||
|         del(doc.embedded_field.list_field[2].list_field[2]['hello']) |  | ||||||
|         self.assertEquals(doc._delta(), ({'embedded_field.list_field.2.list_field': [1, 2, {}]}, {})) |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         del(doc.embedded_field.list_field[2].list_field) |  | ||||||
|         self.assertEquals(doc._delta(), ({}, {'embedded_field.list_field.2.list_field': 1})) |  | ||||||
|  |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         doc.dict_field = {'embedded': embedded_1} |  | ||||||
|         doc.save() |  | ||||||
|         doc.reload() |  | ||||||
|  |  | ||||||
|         doc.dict_field['embedded'].string_field = 'Hello World' |  | ||||||
|         self.assertEquals(doc._get_changed_fields(), ['dict_field.embedded.string_field']) |  | ||||||
|         self.assertEquals(doc._delta(), ({'dict_field.embedded.string_field': 'Hello World'}, {})) |  | ||||||
|  |  | ||||||
|     def test_indexes(self): |  | ||||||
|         """Ensure that indexes are used when meta[indexes] is specified. |  | ||||||
|         """ |  | ||||||
|         class BlogPost(DynamicDocument): |  | ||||||
|             meta = { |  | ||||||
|                 'indexes': [ |  | ||||||
|                     '-date', |  | ||||||
|                     ('category', '-date') |  | ||||||
|                 ], |  | ||||||
|             } |  | ||||||
|  |  | ||||||
|         BlogPost.drop_collection() |  | ||||||
|  |  | ||||||
|         info = BlogPost.objects._collection.index_information() |  | ||||||
|         # _id, '-date', ('cat', 'date') |  | ||||||
|         # NB: there is no index on _types by itself, since |  | ||||||
|         # the indices on -date and tags will both contain |  | ||||||
|         # _types as first element in the key |  | ||||||
|         self.assertEqual(len(info), 3) |  | ||||||
|  |  | ||||||
|         # Indexes are lazy so use list() to perform query |  | ||||||
|         list(BlogPost.objects) |  | ||||||
|         info = BlogPost.objects._collection.index_information() |  | ||||||
|         info = [value['key'] for key, value in info.iteritems()] |  | ||||||
|         self.assertTrue([('_types', 1), ('category', 1), ('date', -1)] |  | ||||||
|                         in info) |  | ||||||
|         self.assertTrue([('_types', 1), ('date', -1)] in info) |  | ||||||
							
								
								
									
										2114
									
								
								tests/test_fields.py
									
									
									
									
									
								
							
							
						
						
									
										2114
									
								
								tests/test_fields.py
									
									
									
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,32 +0,0 @@ | |||||||
| import unittest |  | ||||||
| import pymongo |  | ||||||
| from pymongo import ReadPreference, ReplicaSetConnection |  | ||||||
|  |  | ||||||
| import mongoengine |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine.connection import get_db, get_connection, ConnectionError |  | ||||||
|  |  | ||||||
|  |  | ||||||
class ConnectionTest(unittest.TestCase):
    """Connection-level tests for replica-set URI handling."""

    def tearDown(self):
        # Reset module-level connection caches so each test starts clean.
        mongoengine.connection._connection_settings = {}
        mongoengine.connection._connections = {}
        mongoengine.connection._dbs = {}

    def test_replicaset_uri_passes_read_preference(self):
        """Requires a replica set called "rs" on port 27017
        """

        try:
            conn = connect(db='mongoenginetest', host="mongodb://localhost/mongoenginetest?replicaSet=rs", read_preference=ReadPreference.SECONDARY_ONLY)
        except ConnectionError:
            # No replica set available in this environment; nothing to test.
            # (The exception object was previously bound but never used.)
            return

        if not isinstance(conn, ReplicaSetConnection):
            # Driver fell back to a plain connection; skip the assertion.
            return

        # The read_preference passed to connect() must survive onto the
        # resulting ReplicaSetConnection.
        self.assertEqual(conn.read_preference, ReadPreference.SECONDARY_ONLY)
|  |  | ||||||
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| @@ -1,230 +0,0 @@ | |||||||
| # -*- coding: utf-8 -*- |  | ||||||
| import unittest |  | ||||||
|  |  | ||||||
| from mongoengine import * |  | ||||||
| from mongoengine import signals |  | ||||||
|  |  | ||||||
# Module-level buffer that collects messages emitted by the signal handlers
# below; SignalTests.get_signal_output rebinds it to a fresh list per call.
signal_output = []
|  |  | ||||||
|  |  | ||||||
| class SignalTests(unittest.TestCase): |  | ||||||
|     """ |  | ||||||
|     Testing signals before/after saving and deleting. |  | ||||||
|     """ |  | ||||||
|  |  | ||||||
    def get_signal_output(self, fn, *args, **kwargs):
        """Call ``fn(*args, **kwargs)`` and return the list of signal
        messages it caused to be appended to the module-level buffer."""
        # Flush any existing signal output
        global signal_output
        signal_output = []
        fn(*args, **kwargs)
        return signal_output
|  |  | ||||||
    def setUp(self):
        """Define two signal-emitting Document classes and connect their
        handlers, recording receiver counts first so tearDown can verify
        everything gets disconnected again."""
        connect(db='mongoenginetest')
        class Author(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

            @classmethod
            def pre_init(cls, sender, document, *args, **kwargs):
                signal_output.append('pre_init signal, %s' % cls.__name__)
                signal_output.append(str(kwargs['values']))

            @classmethod
            def post_init(cls, sender, document, **kwargs):
                signal_output.append('post_init signal, %s' % document)

            @classmethod
            def pre_save(cls, sender, document, **kwargs):
                signal_output.append('pre_save signal, %s' % document)

            @classmethod
            def post_save(cls, sender, document, **kwargs):
                signal_output.append('post_save signal, %s' % document)
                if 'created' in kwargs:
                    if kwargs['created']:
                        signal_output.append('Is created')
                    else:
                        signal_output.append('Is updated')

            @classmethod
            def pre_delete(cls, sender, document, **kwargs):
                signal_output.append('pre_delete signal, %s' % document)

            @classmethod
            def post_delete(cls, sender, document, **kwargs):
                signal_output.append('post_delete signal, %s' % document)

            @classmethod
            def pre_bulk_insert(cls, sender, documents, **kwargs):
                signal_output.append('pre_bulk_insert signal, %s' % documents)

            @classmethod
            def post_bulk_insert(cls, sender, documents, **kwargs):
                signal_output.append('post_bulk_insert signal, %s' % documents)
                if kwargs.get('loaded', False):
                    signal_output.append('Is loaded')
                else:
                    signal_output.append('Not loaded')
        self.Author = Author


        # Second document class: used to prove signals are delivered only
        # to handlers connected for the matching sender.
        class Another(Document):
            name = StringField()

            def __unicode__(self):
                return self.name

            @classmethod
            def pre_init(cls, sender, document, **kwargs):
                signal_output.append('pre_init Another signal, %s' % cls.__name__)
                signal_output.append(str(kwargs['values']))

            @classmethod
            def post_init(cls, sender, document, **kwargs):
                signal_output.append('post_init Another signal, %s' % document)

            @classmethod
            def pre_save(cls, sender, document, **kwargs):
                signal_output.append('pre_save Another signal, %s' % document)

            @classmethod
            def post_save(cls, sender, document, **kwargs):
                signal_output.append('post_save Another signal, %s' % document)
                if 'created' in kwargs:
                    if kwargs['created']:
                        signal_output.append('Is created')
                    else:
                        signal_output.append('Is updated')

            @classmethod
            def pre_delete(cls, sender, document, **kwargs):
                signal_output.append('pre_delete Another signal, %s' % document)

            @classmethod
            def post_delete(cls, sender, document, **kwargs):
                signal_output.append('post_delete Another signal, %s' % document)

        self.Another = Another
        # Save up the number of connected signals so that we can check at the end
        # that all the signals we register get properly unregistered
        self.pre_signals = (
            len(signals.pre_init.receivers),
            len(signals.post_init.receivers),
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
            len(signals.pre_bulk_insert.receivers),
            len(signals.post_bulk_insert.receivers),
        )

        signals.pre_init.connect(Author.pre_init, sender=Author)
        signals.post_init.connect(Author.post_init, sender=Author)
        signals.pre_save.connect(Author.pre_save, sender=Author)
        signals.post_save.connect(Author.post_save, sender=Author)
        signals.pre_delete.connect(Author.pre_delete, sender=Author)
        signals.post_delete.connect(Author.post_delete, sender=Author)
        signals.pre_bulk_insert.connect(Author.pre_bulk_insert, sender=Author)
        signals.post_bulk_insert.connect(Author.post_bulk_insert, sender=Author)

        # Another connects only the six init/save/delete signals; it defines
        # no bulk-insert handlers.
        signals.pre_init.connect(Another.pre_init, sender=Another)
        signals.post_init.connect(Another.post_init, sender=Another)
        signals.pre_save.connect(Another.pre_save, sender=Another)
        signals.post_save.connect(Another.post_save, sender=Another)
        signals.pre_delete.connect(Another.pre_delete, sender=Another)
        signals.post_delete.connect(Another.post_delete, sender=Another)
|  |  | ||||||
    def tearDown(self):
        """Disconnect every handler wired up in setUp, then verify the
        global receiver counts are back to their pre-test values."""
        signals.pre_init.disconnect(self.Author.pre_init)
        signals.post_init.disconnect(self.Author.post_init)
        signals.post_delete.disconnect(self.Author.post_delete)
        signals.pre_delete.disconnect(self.Author.pre_delete)
        signals.post_save.disconnect(self.Author.post_save)
        signals.pre_save.disconnect(self.Author.pre_save)
        signals.pre_bulk_insert.disconnect(self.Author.pre_bulk_insert)
        signals.post_bulk_insert.disconnect(self.Author.post_bulk_insert)

        signals.pre_init.disconnect(self.Another.pre_init)
        signals.post_init.disconnect(self.Another.post_init)
        signals.post_delete.disconnect(self.Another.post_delete)
        signals.pre_delete.disconnect(self.Another.pre_delete)
        signals.post_save.disconnect(self.Another.post_save)
        signals.pre_save.disconnect(self.Another.pre_save)

        # Check that all our signals got disconnected properly.
        post_signals = (
            len(signals.pre_init.receivers),
            len(signals.post_init.receivers),
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
            len(signals.pre_bulk_insert.receivers),
            len(signals.post_bulk_insert.receivers),
        )

        self.assertEqual(self.pre_signals, post_signals)
|  |  | ||||||
|     def test_model_signals(self): |  | ||||||
|         """ Model saves should throw some signals. """ |  | ||||||
|  |  | ||||||
|         def create_author(): |  | ||||||
|             a1 = self.Author(name='Bill Shakespeare') |  | ||||||
|  |  | ||||||
|         def bulk_create_author_with_load(): |  | ||||||
|             a1 = self.Author(name='Bill Shakespeare') |  | ||||||
|             self.Author.objects.insert([a1], load_bulk=True) |  | ||||||
|  |  | ||||||
|         def bulk_create_author_without_load(): |  | ||||||
|             a1 = self.Author(name='Bill Shakespeare') |  | ||||||
|             self.Author.objects.insert([a1], load_bulk=False) |  | ||||||
|  |  | ||||||
|         self.assertEqual(self.get_signal_output(create_author), [ |  | ||||||
|             "pre_init signal, Author", |  | ||||||
|             "{'name': 'Bill Shakespeare'}", |  | ||||||
|             "post_init signal, Bill Shakespeare", |  | ||||||
|         ]) |  | ||||||
|  |  | ||||||
|         a1 = self.Author(name='Bill Shakespeare') |  | ||||||
|         self.assertEqual(self.get_signal_output(a1.save), [ |  | ||||||
|             "pre_save signal, Bill Shakespeare", |  | ||||||
|             "post_save signal, Bill Shakespeare", |  | ||||||
|             "Is created" |  | ||||||
|         ]) |  | ||||||
|  |  | ||||||
|         a1.reload() |  | ||||||
|         a1.name='William Shakespeare' |  | ||||||
|         self.assertEqual(self.get_signal_output(a1.save), [ |  | ||||||
|             "pre_save signal, William Shakespeare", |  | ||||||
|             "post_save signal, William Shakespeare", |  | ||||||
|             "Is updated" |  | ||||||
|         ]) |  | ||||||
|  |  | ||||||
|         self.assertEqual(self.get_signal_output(a1.delete), [ |  | ||||||
|             'pre_delete signal, William Shakespeare', |  | ||||||
|             'post_delete signal, William Shakespeare', |  | ||||||
|         ]) |  | ||||||
|  |  | ||||||
|         signal_output = self.get_signal_output(bulk_create_author_with_load) |  | ||||||
|  |  | ||||||
|         # The output of this signal is not entirely deterministic. The reloaded |  | ||||||
|         # object will have an object ID. Hence, we only check part of the output |  | ||||||
|         self.assertEquals(signal_output[3], |  | ||||||
|             "pre_bulk_insert signal, [<Author: Bill Shakespeare>]") |  | ||||||
|         self.assertEquals(signal_output[-2:], |  | ||||||
|             ["post_bulk_insert signal, [<Author: Bill Shakespeare>]", |  | ||||||
|              "Is loaded",]) |  | ||||||
|  |  | ||||||
|         self.assertEqual(self.get_signal_output(bulk_create_author_without_load), [ |  | ||||||
|             "pre_init signal, Author", |  | ||||||
|             "{'name': 'Bill Shakespeare'}", |  | ||||||
|             "post_init signal, Bill Shakespeare", |  | ||||||
|             "pre_bulk_insert signal, [<Author: Bill Shakespeare>]", |  | ||||||
|             "post_bulk_insert signal, [<Author: Bill Shakespeare>]", |  | ||||||
|             "Not loaded", |  | ||||||
|         ]) |  | ||||||
|  |  | ||||||
|         self.Author.objects.delete() |  | ||||||
		Reference in New Issue
	
	Block a user